/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */
#include "coretypes.h"
#include "langhooks.h"
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
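/* For illustration: each comparison corresponds to the set of possible
   outcomes "less", "equal", "greater" and "unordered" it accepts,
   represented as a bitmask.  Combining two comparisons on the same
   operands with AND or OR then reduces to a bitwise AND or OR of their
   masks; see combine_comparisons below, which does exactly that.  */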
enum comparison_code
{
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree, tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */

#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
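/* Worked example: with 8-bit quantities, a = 0x7f and b = 0x01 give
   sum = 0x80, i.e. the addition wrapped.  Then ~(a ^ b) = 0x81 and
   (a ^ sum) = 0xff, so their AND is 0x81, which has the sign bit set,
   and the macro reports the overflow.  */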
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
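/* Worked example: on a host where HOST_BITS_PER_WIDE_INT == 64,
   BASE == 2^32, so the single word 0x123456789abcdef0 splits into
   LOWPART == 0x9abcdef0 and HIGHPART == 0x12345678, and a doubleword
   (low, hi) is spread by encode () over four such half-word digits.  */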
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}
/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
/* T is an INT_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOWED if,
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs
        or OVERFLOWABLE is <0 and any overflow occurs
   We set TREE_CONSTANT_OVERFLOWED if,
        CONST_OVERFLOWED is nonzero
        or we set TREE_OVERFLOWED.
   We return either the original T, or a copy.  */
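/* Worked example: fitting the value 0x1fe into an 8-bit unsigned type
   clears the bits above bit 7 and yields 0xfe; fitting it into an 8-bit
   signed type additionally sign extends from bit 7 and yields -2.  */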
tree
force_fit_type (tree t, int overflowable,
                bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
                        || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (high & ((unsigned HOST_WIDE_INT)1
                  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)low < 0)
        high = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
        {
          high = -1;
          low |= (HOST_WIDE_INT)(-1) << prec;
        }
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (overflowed_const)
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }

  return t;
}
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */
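/* Worked example: dividing -7 by 2 gives quotient -3 and remainder -1
   for TRUNC_DIV_EXPR, quotient -4 and remainder 1 for FLOOR_DIV_EXPR,
   quotient -3 and remainder -1 for CEIL_DIV_EXPR, and quotient -4 and
   remainder 1 for ROUND_DIV_EXPR (ties round away from zero).  In every
   case num == quo * den + rem.  */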
int
div_and_round_double (enum tree_code code, int uns,
		      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
		      HOST_WIDE_INT hnum_orig,
		      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
		      HOST_WIDE_INT hden_orig,
		      unsigned HOST_WIDE_INT *lquo,
		      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
		      HOST_WIDE_INT *hrem)
{
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0, quo_neg = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
	{
	  quo_neg = ~ quo_neg;
	  /* (minimum integer) / (-1) is the only overflow case.  */
	  if (neg_double (lnum, hnum, &lnum, &hnum)
	      && ((HOST_WIDE_INT) lden & hden) == -1)
	    overflow = 1;
	}
      if (hden < 0)
	{
	  quo_neg = ~ quo_neg;
	  neg_double (lden, hden, &lden, &hden);
	}
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
	{
	  work = num[i] + carry * BASE;
	  quo[i] = work / lden;
	  carry = work % lden;
	}
    }
  else
    {
      /* Full double precision division,
	 with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
	if (den[i] != 0)
	  {
	    den_hi_sig = i;
	    break;
	  }

      /* Insure that the first digit of the divisor is at least BASE/2.
	 This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
	{			/* scale divisor and dividend */
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (num[i] * scale) + carry;
	      num[i] = LOWPART (work);
	      carry = HIGHPART (work);
	    }

	  num[4] = carry;
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (den[i] * scale) + carry;
	      den[i] = LOWPART (work);
	      carry = HIGHPART (work);
	      if (den[i] != 0) den_hi_sig = i;
	    }
	}

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
	{
	  /* Guess the next quotient digit, quo_est, by dividing the first
	     two remaining dividend digits by the high order quotient digit.
	     quo_est is never low and is at most 2 high.  */
	  unsigned HOST_WIDE_INT tmp;

	  num_hi_sig = i + den_hi_sig + 1;
	  work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
	  if (num[num_hi_sig] != den[den_hi_sig])
	    quo_est = work / den[den_hi_sig];
	  else
	    quo_est = BASE - 1;

	  /* Refine quo_est so it's usually correct, and at most one high.  */
	  tmp = work - quo_est * den[den_hi_sig];
	  if (tmp < BASE
	      && (den[den_hi_sig - 1] * quo_est
		  > (tmp * BASE + num[num_hi_sig - 2])))
	    quo_est--;

	  /* Try QUO_EST as the quotient digit, by multiplying the
	     divisor by QUO_EST and subtracting from the remaining dividend.
	     Keep in mind that QUO_EST is the I - 1st digit.  */

	  carry = 0;
	  for (j = 0; j <= den_hi_sig; j++)
	    {
	      work = quo_est * den[j] + carry;
	      carry = HIGHPART (work);
	      work = num[i + j] - LOWPART (work);
	      num[i + j] = LOWPART (work);
	      carry += HIGHPART (work) != 0;
	    }

	  /* If quo_est was high by one, then num[i] went negative and
	     we need to correct things.  */
	  if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
	    {
	      quo_est--;
	      carry = 0;		/* add divisor back in */
	      for (j = 0; j <= den_hi_sig; j++)
		{
		  work = num[i + j] + den[j] + carry;
		  carry = HIGHPART (work);
		  num[i + j] = LOWPART (work);
		}

	      num[num_hi_sig] += carry;
	    }

	  /* Store the quotient digit.  */
	  quo[i] = quo_est;
	}
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
	{
	  /* quo = quo - 1;  */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
	{
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
	unsigned HOST_WIDE_INT labs_rem = *lrem;
	HOST_WIDE_INT habs_rem = *hrem;
	unsigned HOST_WIDE_INT labs_den = lden, ltwice;
	HOST_WIDE_INT habs_den = hden, htwice;

	/* Get absolute values.  */
	if (*hrem < 0)
	  neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
	if (hden < 0)
	  neg_double (lden, hden, &labs_den, &habs_den);

	/* If (2 * abs (lrem) >= abs (lden)) */
	mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
		    labs_rem, habs_rem, &ltwice, &htwice);

	if (((unsigned HOST_WIDE_INT) habs_den
	     < (unsigned HOST_WIDE_INT) htwice)
	    || (((unsigned HOST_WIDE_INT) habs_den
		 == (unsigned HOST_WIDE_INT) htwice)
		&& (labs_den < ltwice)))
	  {
	    if (*hquo < 0)
	      /* quo = quo - 1;  */
	      add_double (*lquo, *hquo,
			  (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
	    else
	      /* quo = quo + 1;  */
	      add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
			  lquo, hquo);
	  }
	else
	  return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

static tree
div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
/* Return true if the built-in mathematical function specified by CODE
   preserves the sign of its argument, i.e. -f(x) == f(-x).  */
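/* For illustration: this property holds for odd functions such as sin,
   tan and atan, where f(-x) == -f(x), so -f(x) may be rewritten as
   f(-x); it does not hold for even functions such as cos.  */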
static bool
negate_mathfn_p (enum built_in_function code)
/* Check whether we may negate an integer constant T without causing
   overflow.  */
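/* For illustration: in a signed type of precision N the only value whose
   negation overflows is -2^(N-1), e.g. 0x80000000 for a 32-bit int, so
   that is the single bit pattern the check below rejects.  */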
static bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type) || ! flag_trapv)
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
          || TYPE_UNSIGNED (type)
          || ! flag_trapv)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                 tem, TREE_OPERAND (t, 0));
              return fold_convert (type, tem);
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                 tem, TREE_OPERAND (t, 1));
              return fold_convert (type, tem);
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_convert (type,
                             fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                          TREE_OPERAND (t, 1),
                                          TREE_OPERAND (t, 0)));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              negate_expr (tem)));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
                                              negate_expr (tem),
                                              TREE_OPERAND (t, 1)));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */
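/* Worked example: splitting IN = `x + 3' with CODE == PLUS_EXPR stores 3
   in *LITP, leaves *CONP null, and returns `x' as the variable part;
   splitting `x - 3' instead stores 3 in *MINUS_LITP.  */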
static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
                      fold_convert (type, t2));
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */
tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      gcc_unreachable ();
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type (t, 1,
                        ((!uns || is_sizetype) && overflow)
                        | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
                        TREE_CONSTANT_OVERFLOW (arg1)
                        | TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */
static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  tree t;

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */

      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may be dependent upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */

      if ((flag_rounding_math
           || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
               && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            tree t1, t2, real, imag;
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t1 = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2, notrunc),
                              const_binop (MULT_EXPR, i1, i2, notrunc),
                              notrunc);
            t2 = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, i1, r2, notrunc),
                              const_binop (MULT_EXPR, r1, i2, notrunc),
                              notrunc);

            if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
              {
                real = const_binop (TRUNC_DIV_EXPR, t1, magsquared, notrunc);
                imag = const_binop (TRUNC_DIV_EXPR, t2, magsquared, notrunc);
              }
            else
              {
                real = const_binop (RDIV_EXPR, t1, magsquared, notrunc);
                imag = const_binop (RDIV_EXPR, t2, magsquared, notrunc);
              }

            t = build_complex (type, real, imag);
          }
          break;

        default:
          gcc_unreachable ();
        }
      return t;
    }
  return 0;
}
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type and it must be a size type.
   If the operands are constant, so is the result.  */
tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold_build2 (code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */
tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = type == bitsizetype ? sbitsizetype : ssizetype;

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return fold_convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
                          TREE_INT_CST_HIGH (arg1));

  t = force_fit_type (t,
                      /* Don't set the overflow when
                         converting a pointer  */
                      !POINTER_TYPE_P (TREE_TYPE (arg1)),
                      (TREE_INT_CST_HIGH (arg1) < 0
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    case FIX_CEIL_EXPR:
      real_ceil (&r, VOIDmode, &x);
      break;

    case FIX_FLOOR_EXPR:
      real_floor (&r, VOIDmode, &x);
      break;

    case FIX_ROUND_EXPR:
      real_round (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          high = TREE_INT_CST_HIGH (lt);
          low = TREE_INT_CST_LOW (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              high = TREE_INT_CST_HIGH (ut);
              low = TREE_INT_CST_LOW (ut);
            }
        }
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = build_int_cst_wide (type, low, high);

  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  TREE_CONSTANT_OVERFLOW (t)
    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert (tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
      || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
                                        TYPE_MAIN_VARIANT (orig)))
    return fold_build1 (NOP_EXPR, type, arg);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1 (NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        {
          tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert (type, tem);
        }
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1 (NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE: case CHAR_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1 (FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
                              type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert (type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE: case CHAR_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case REAL_TYPE:
          return build2 (COMPLEX_EXPR, type,
                         fold_convert (TREE_TYPE (type), arg),
                         fold_convert (TREE_TYPE (type), integer_zero_node));

        case COMPLEX_TYPE:
          {
            tree rpart, ipart;

            if (TREE_CODE (arg) == COMPLEX_EXPR)
              {
                rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
                ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
                return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
              }

            arg = save_expr (arg);
            rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
            ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
            rpart = fold_convert (TREE_TYPE (type), rpart);
            ipart = fold_convert (TREE_TYPE (type), ipart);
            return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
          }

        default:
          gcc_unreachable ();
        }

    case VECTOR_TYPE:
      if (integer_zerop (arg))
        return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
                  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1 (VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      return fold_build1 (CONVERT_EXPR, type, fold_ignored_result (arg));

    default:
      gcc_unreachable ();
    }
}
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
    {
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
    case ARRAY_RANGE_REF:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
      break;

    default:
      /* Assume the worst for front-end tree codes.  */
      if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
        break;
      return false;
    }

  return true;
}
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue (tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;
  return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
}
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue (tree x)
{
  if (pedantic_lvalues)
    return non_lvalue (x);
  else
    return x;
}
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
   as well: if reversing the comparison is unsafe, return ERROR_MARK.  */
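/* For illustration: the inverse of LT_EXPR is GE_EXPR when NaNs cannot
   occur, but UNGE_EXPR when they can, since !(x < y) must also be true
   when either operand is a NaN.  */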
enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case LTGT_EXPR:
    case UNEQ_EXPR:
      return code;

    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    case UNGT_EXPR:
      return UNLT_EXPR;
    case UNGE_EXPR:
      return UNLE_EXPR;
    case UNLT_EXPR:
      return UNGT_EXPR;
    case UNLE_EXPR:
      return UNGE_EXPR;

    default:
      gcc_unreachable ();
    }
}
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */
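/* Worked example: combining (x < y) && (x == y) ANDs the LT and EQ
   encodings, whose bit sets are disjoint, so the combined code is
   COMPCODE_FALSE and the whole expression folds to constant false.  */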
2299 combine_comparisons (enum tree_code code
, enum tree_code lcode
,
2300 enum tree_code rcode
, tree truth_type
,
2301 tree ll_arg
, tree lr_arg
)
2303 bool honor_nans
= HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg
)));
2304 enum comparison_code lcompcode
= comparison_to_compcode (lcode
);
2305 enum comparison_code rcompcode
= comparison_to_compcode (rcode
);
2306 enum comparison_code compcode
;
2310 case TRUTH_AND_EXPR
: case TRUTH_ANDIF_EXPR
:
2311 compcode
= lcompcode
& rcompcode
;
2314 case TRUTH_OR_EXPR
: case TRUTH_ORIF_EXPR
:
2315 compcode
= lcompcode
| rcompcode
;
2324 /* Eliminate unordered comparisons, as well as LTGT and ORD
2325 which are not used unless the mode has NaNs. */
2326 compcode
&= ~COMPCODE_UNORD
;
2327 if (compcode
== COMPCODE_LTGT
)
2328 compcode
= COMPCODE_NE
;
2329 else if (compcode
== COMPCODE_ORD
)
2330 compcode
= COMPCODE_TRUE
;
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
	 under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
		   && (lcompcode != COMPCODE_EQ)
		   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
		   && (rcompcode != COMPCODE_EQ)
		   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
		  && (compcode != COMPCODE_EQ)
		  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
	 such that the RHS, if evaluated, will never trap.  For
	 example, in ORD (x, y) && (x < y), we evaluate the RHS only
	 if neither x nor y is NaN.  (This is a mixed blessing: for
	 example, the expression above will never trap, hence
	 optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
	  || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
	rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
	 trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
	  && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
	return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
	return NULL_TREE;
    }
  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    return fold_build2 (compcode_to_comparison (compcode),
			truth_type, ll_arg, lr_arg);
}
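/* Illustrative sketch only: the source-level effect combine_comparisons
   above enables.  For integer operands (no NaNs, no traps) each pair below
   is equivalent, because OR-ing the "<" mask with the "==" mask yields the
   "<=" mask, and AND-ing "<=" with ">=" yields "==".  */
static int or_before  (int x, int y) { return x < y || x == y; }
static int or_after   (int x, int y) { return x <= y; }
static int and_before (int x, int y) { return x <= y && x >= y; }
static int and_after  (int x, int y) { return x == y; }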
/* Return nonzero if CODE is a tree code that represents a truth value.  */

static int
truth_value_p (enum tree_code code)
{
  return (TREE_CODE_CLASS (code) == tcc_comparison
	  || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
	  || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
	  || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
}
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */
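/* Illustrative sketch only: why "indistinguishable" above is not the same
   as C's "==".  Under IEEE arithmetic the two zeros compare equal although
   they behave differently, and a NaN compares unequal even to itself.  */
static int zeros_compare_equal (void)
{
  return 0.0 == -0.0;                  /* 1, yet 1.0/0.0 != 1.0/-0.0 */
}

static int nan_not_equal_to_itself (double nan_value)
{
  return nan_value != nan_value;       /* 1 when nan_value is a NaN */
}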
int
operand_equal_p (tree arg0, tree arg1, unsigned int flags)
{
2416 /* If either is ERROR_MARK, they aren't equal. */
2417 if (TREE_CODE (arg0
) == ERROR_MARK
|| TREE_CODE (arg1
) == ERROR_MARK
)
2420 /* If both types don't have the same signedness, then we can't consider
2421 them equal. We must check this before the STRIP_NOPS calls
2422 because they may change the signedness of the arguments. */
2423 if (TYPE_UNSIGNED (TREE_TYPE (arg0
)) != TYPE_UNSIGNED (TREE_TYPE (arg1
)))
2429 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
2430 /* This is needed for conversions and for COMPONENT_REF.
2431 Might as well play it safe and always test this. */
2432 || TREE_CODE (TREE_TYPE (arg0
)) == ERROR_MARK
2433 || TREE_CODE (TREE_TYPE (arg1
)) == ERROR_MARK
2434 || TYPE_MODE (TREE_TYPE (arg0
)) != TYPE_MODE (TREE_TYPE (arg1
)))
2437 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2438 We don't care about side effects in that case because the SAVE_EXPR
2439 takes care of that for us. In all other cases, two expressions are
2440 equal if they have no side effects. If we have two identical
2441 expressions with side effects that should be treated the same due
2442 to the only side effects being identical SAVE_EXPR's, that will
2443 be detected in the recursive calls below. */
2444 if (arg0
== arg1
&& ! (flags
& OEP_ONLY_CONST
)
2445 && (TREE_CODE (arg0
) == SAVE_EXPR
2446 || (! TREE_SIDE_EFFECTS (arg0
) && ! TREE_SIDE_EFFECTS (arg1
))))
2449 /* Next handle constant cases, those for which we can return 1 even
2450 if ONLY_CONST is set. */
2451 if (TREE_CONSTANT (arg0
) && TREE_CONSTANT (arg1
))
2452 switch (TREE_CODE (arg0
))
2455 return (! TREE_CONSTANT_OVERFLOW (arg0
)
2456 && ! TREE_CONSTANT_OVERFLOW (arg1
)
2457 && tree_int_cst_equal (arg0
, arg1
));
2460 return (! TREE_CONSTANT_OVERFLOW (arg0
)
2461 && ! TREE_CONSTANT_OVERFLOW (arg1
)
2462 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0
),
2463 TREE_REAL_CST (arg1
)));
2469 if (TREE_CONSTANT_OVERFLOW (arg0
)
2470 || TREE_CONSTANT_OVERFLOW (arg1
))
2473 v1
= TREE_VECTOR_CST_ELTS (arg0
);
2474 v2
= TREE_VECTOR_CST_ELTS (arg1
);
2477 if (!operand_equal_p (TREE_VALUE (v1
), TREE_VALUE (v2
),
2480 v1
= TREE_CHAIN (v1
);
2481 v2
= TREE_CHAIN (v2
);
2488 return (operand_equal_p (TREE_REALPART (arg0
), TREE_REALPART (arg1
),
2490 && operand_equal_p (TREE_IMAGPART (arg0
), TREE_IMAGPART (arg1
),
2494 return (TREE_STRING_LENGTH (arg0
) == TREE_STRING_LENGTH (arg1
)
2495 && ! memcmp (TREE_STRING_POINTER (arg0
),
2496 TREE_STRING_POINTER (arg1
),
2497 TREE_STRING_LENGTH (arg0
)));
2500 return operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0),
2506 if (flags
& OEP_ONLY_CONST
)
  /* Define macros to test an operand from arg0 and arg1 for equality and a
     variant that allows null and views null as being different from any
     non-null value.  In the latter case, if either is null, then both
     must be; otherwise, do the normal comparison.  */
2513 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2514 TREE_OPERAND (arg1, N), flags)
2516 #define OP_SAME_WITH_NULL(N) \
2517 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2518 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2520 switch (TREE_CODE_CLASS (TREE_CODE (arg0
)))
2523 /* Two conversions are equal only if signedness and modes match. */
2524 switch (TREE_CODE (arg0
))
2529 case FIX_TRUNC_EXPR
:
2530 case FIX_FLOOR_EXPR
:
2531 case FIX_ROUND_EXPR
:
2532 if (TYPE_UNSIGNED (TREE_TYPE (arg0
))
2533 != TYPE_UNSIGNED (TREE_TYPE (arg1
)))
2543 case tcc_comparison
:
2545 if (OP_SAME (0) && OP_SAME (1))
2548 /* For commutative ops, allow the other order. */
2549 return (commutative_tree_code (TREE_CODE (arg0
))
2550 && operand_equal_p (TREE_OPERAND (arg0
, 0),
2551 TREE_OPERAND (arg1
, 1), flags
)
2552 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2553 TREE_OPERAND (arg1
, 0), flags
));
2556 /* If either of the pointer (or reference) expressions we are
2557 dereferencing contain a side effect, these cannot be equal. */
2558 if (TREE_SIDE_EFFECTS (arg0
)
2559 || TREE_SIDE_EFFECTS (arg1
))
2562 switch (TREE_CODE (arg0
))
2565 case ALIGN_INDIRECT_REF
:
2566 case MISALIGNED_INDIRECT_REF
:
2572 case ARRAY_RANGE_REF
:
2573 /* Operands 2 and 3 may be null. */
2576 && OP_SAME_WITH_NULL (2)
2577 && OP_SAME_WITH_NULL (3));
2580 /* Handle operand 2 the same as for ARRAY_REF. */
2581 return OP_SAME (0) && OP_SAME (1) && OP_SAME_WITH_NULL (2);
2584 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2590 case tcc_expression
:
2591 switch (TREE_CODE (arg0
))
2594 case TRUTH_NOT_EXPR
:
2597 case TRUTH_ANDIF_EXPR
:
2598 case TRUTH_ORIF_EXPR
:
2599 return OP_SAME (0) && OP_SAME (1);
2601 case TRUTH_AND_EXPR
:
2603 case TRUTH_XOR_EXPR
:
2604 if (OP_SAME (0) && OP_SAME (1))
2607 /* Otherwise take into account this is a commutative operation. */
2608 return (operand_equal_p (TREE_OPERAND (arg0
, 0),
2609 TREE_OPERAND (arg1
, 1), flags
)
2610 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2611 TREE_OPERAND (arg1
, 0), flags
));
2614 /* If the CALL_EXPRs call different functions, then they
2615 clearly can not be equal. */
2620 unsigned int cef
= call_expr_flags (arg0
);
2621 if (flags
& OEP_PURE_SAME
)
2622 cef
&= ECF_CONST
| ECF_PURE
;
2629 /* Now see if all the arguments are the same. operand_equal_p
2630 does not handle TREE_LIST, so we walk the operands here
2631 feeding them to operand_equal_p. */
2632 arg0
= TREE_OPERAND (arg0
, 1);
2633 arg1
= TREE_OPERAND (arg1
, 1);
2634 while (arg0
&& arg1
)
2636 if (! operand_equal_p (TREE_VALUE (arg0
), TREE_VALUE (arg1
),
2640 arg0
= TREE_CHAIN (arg0
);
2641 arg1
= TREE_CHAIN (arg1
);
2644 /* If we get here and both argument lists are exhausted
2645 then the CALL_EXPRs are equal. */
2646 return ! (arg0
|| arg1
);
2652 case tcc_declaration
:
2653 /* Consider __builtin_sqrt equal to sqrt. */
2654 return (TREE_CODE (arg0
) == FUNCTION_DECL
2655 && DECL_BUILT_IN (arg0
) && DECL_BUILT_IN (arg1
)
2656 && DECL_BUILT_IN_CLASS (arg0
) == DECL_BUILT_IN_CLASS (arg1
)
2657 && DECL_FUNCTION_CODE (arg0
) == DECL_FUNCTION_CODE (arg1
));
2664 #undef OP_SAME_WITH_NULL
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */
2673 operand_equal_for_comparison_p (tree arg0
, tree arg1
, tree other
)
2675 int unsignedp1
, unsignedpo
;
2676 tree primarg0
, primarg1
, primother
;
2677 unsigned int correct_width
;
2679 if (operand_equal_p (arg0
, arg1
, 0))
2682 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
2683 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1
)))
2686 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2687 and see if the inner values are the same. This removes any
2688 signedness comparison, which doesn't matter here. */
2689 primarg0
= arg0
, primarg1
= arg1
;
2690 STRIP_NOPS (primarg0
);
2691 STRIP_NOPS (primarg1
);
2692 if (operand_equal_p (primarg0
, primarg1
, 0))
2695 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2696 actual comparison operand, ARG0.
2698 First throw away any conversions to wider types
2699 already present in the operands. */
2701 primarg1
= get_narrower (arg1
, &unsignedp1
);
2702 primother
= get_narrower (other
, &unsignedpo
);
2704 correct_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
2705 if (unsignedp1
== unsignedpo
2706 && TYPE_PRECISION (TREE_TYPE (primarg1
)) < correct_width
2707 && TYPE_PRECISION (TREE_TYPE (primother
)) < correct_width
)
2709 tree type
= TREE_TYPE (arg0
);
2711 /* Make sure shorter operand is extended the right way
2712 to match the longer operand. */
2713 primarg1
= fold_convert (lang_hooks
.types
.signed_or_unsigned_type
2714 (unsignedp1
, TREE_TYPE (primarg1
)), primarg1
);
2716 if (operand_equal_p (arg0
, fold_convert (type
, primarg1
), 0))
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */
2734 twoval_comparison_p (tree arg
, tree
*cval1
, tree
*cval2
, int *save_p
)
2736 enum tree_code code
= TREE_CODE (arg
);
2737 enum tree_code_class
class = TREE_CODE_CLASS (code
);
2739 /* We can handle some of the tcc_expression cases here. */
2740 if (class == tcc_expression
&& code
== TRUTH_NOT_EXPR
)
2742 else if (class == tcc_expression
2743 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
2744 || code
== COMPOUND_EXPR
))
2747 else if (class == tcc_expression
&& code
== SAVE_EXPR
2748 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg
, 0)))
      /* If we've already found a CVAL1 or CVAL2, this expression is
	 too complex to handle.  */
      if (*cval1 || *cval2)
2762 return twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
);
2765 return (twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
)
2766 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
2767 cval1
, cval2
, save_p
));
2772 case tcc_expression
:
2773 if (code
== COND_EXPR
)
2774 return (twoval_comparison_p (TREE_OPERAND (arg
, 0),
2775 cval1
, cval2
, save_p
)
2776 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
2777 cval1
, cval2
, save_p
)
2778 && twoval_comparison_p (TREE_OPERAND (arg
, 2),
2779 cval1
, cval2
, save_p
));
2782 case tcc_comparison
:
2783 /* First see if we can handle the first operand, then the second. For
2784 the second operand, we know *CVAL1 can't be zero. It must be that
2785 one side of the comparison is each of the values; test for the
2786 case where this isn't true by failing if the two operands
2789 if (operand_equal_p (TREE_OPERAND (arg
, 0),
2790 TREE_OPERAND (arg
, 1), 0))
2794 *cval1
= TREE_OPERAND (arg
, 0);
2795 else if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 0), 0))
2797 else if (*cval2
== 0)
2798 *cval2
= TREE_OPERAND (arg
, 0);
2799 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 0), 0))
2804 if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 1), 0))
2806 else if (*cval2
== 0)
2807 *cval2
= TREE_OPERAND (arg
, 1);
2808 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 1), 0))
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */
2826 eval_subst (tree arg
, tree old0
, tree new0
, tree old1
, tree new1
)
2828 tree type
= TREE_TYPE (arg
);
2829 enum tree_code code
= TREE_CODE (arg
);
2830 enum tree_code_class
class = TREE_CODE_CLASS (code
);
2832 /* We can handle some of the tcc_expression cases here. */
2833 if (class == tcc_expression
&& code
== TRUTH_NOT_EXPR
)
2835 else if (class == tcc_expression
2836 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
2842 return fold_build1 (code
, type
,
2843 eval_subst (TREE_OPERAND (arg
, 0),
2844 old0
, new0
, old1
, new1
));
2847 return fold_build2 (code
, type
,
2848 eval_subst (TREE_OPERAND (arg
, 0),
2849 old0
, new0
, old1
, new1
),
2850 eval_subst (TREE_OPERAND (arg
, 1),
2851 old0
, new0
, old1
, new1
));
2853 case tcc_expression
:
2857 return eval_subst (TREE_OPERAND (arg
, 0), old0
, new0
, old1
, new1
);
2860 return eval_subst (TREE_OPERAND (arg
, 1), old0
, new0
, old1
, new1
);
2863 return fold_build3 (code
, type
,
2864 eval_subst (TREE_OPERAND (arg
, 0),
2865 old0
, new0
, old1
, new1
),
2866 eval_subst (TREE_OPERAND (arg
, 1),
2867 old0
, new0
, old1
, new1
),
2868 eval_subst (TREE_OPERAND (arg
, 2),
2869 old0
, new0
, old1
, new1
));
2873 /* Fall through - ??? */
2875 case tcc_comparison
:
2877 tree arg0
= TREE_OPERAND (arg
, 0);
2878 tree arg1
= TREE_OPERAND (arg
, 1);
2880 /* We need to check both for exact equality and tree equality. The
2881 former will be true if the operand has a side-effect. In that
2882 case, we know the operand occurred exactly once. */
2884 if (arg0
== old0
|| operand_equal_p (arg0
, old0
, 0))
2886 else if (arg0
== old1
|| operand_equal_p (arg0
, old1
, 0))
2889 if (arg1
== old0
|| operand_equal_p (arg1
, old0
, 0))
2891 else if (arg1
== old1
|| operand_equal_p (arg1
, old1
, 0))
2894 return fold_build2 (code
, type
, arg0
, arg1
);
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */
tree
omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted))
    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);

  return non_lvalue (t);
}
/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted))
    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);

  return pedantic_non_lvalue (t);
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */
tree
omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    t = build2 (COMPOUND_EXPR, type, omitted2, t);
  if (TREE_SIDE_EFFECTS (omitted1))
    t = build2 (COMPOUND_EXPR, type, omitted1, t);

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
}
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

tree
invert_truthvalue (tree arg)
2965 tree type
= TREE_TYPE (arg
);
2966 enum tree_code code
= TREE_CODE (arg
);
2968 if (code
== ERROR_MARK
)
2971 /* If this is a comparison, we can simply invert it, except for
2972 floating-point non-equality comparisons, in which case we just
2973 enclose a TRUTH_NOT_EXPR around what we have. */
2975 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
2977 tree op_type
= TREE_TYPE (TREE_OPERAND (arg
, 0));
2978 if (FLOAT_TYPE_P (op_type
)
2979 && flag_trapping_math
2980 && code
!= ORDERED_EXPR
&& code
!= UNORDERED_EXPR
2981 && code
!= NE_EXPR
&& code
!= EQ_EXPR
)
2982 return build1 (TRUTH_NOT_EXPR
, type
, arg
);
2985 code
= invert_tree_comparison (code
,
2986 HONOR_NANS (TYPE_MODE (op_type
)));
2987 if (code
== ERROR_MARK
)
2988 return build1 (TRUTH_NOT_EXPR
, type
, arg
);
2990 return build2 (code
, type
,
2991 TREE_OPERAND (arg
, 0), TREE_OPERAND (arg
, 1));
2998 return constant_boolean_node (integer_zerop (arg
), type
);
3000 case TRUTH_AND_EXPR
:
3001 return build2 (TRUTH_OR_EXPR
, type
,
3002 invert_truthvalue (TREE_OPERAND (arg
, 0)),
3003 invert_truthvalue (TREE_OPERAND (arg
, 1)));
3006 return build2 (TRUTH_AND_EXPR
, type
,
3007 invert_truthvalue (TREE_OPERAND (arg
, 0)),
3008 invert_truthvalue (TREE_OPERAND (arg
, 1)));
3010 case TRUTH_XOR_EXPR
:
3011 /* Here we can invert either operand. We invert the first operand
3012 unless the second operand is a TRUTH_NOT_EXPR in which case our
3013 result is the XOR of the first operand with the inside of the
3014 negation of the second operand. */
3016 if (TREE_CODE (TREE_OPERAND (arg
, 1)) == TRUTH_NOT_EXPR
)
3017 return build2 (TRUTH_XOR_EXPR
, type
, TREE_OPERAND (arg
, 0),
3018 TREE_OPERAND (TREE_OPERAND (arg
, 1), 0));
3020 return build2 (TRUTH_XOR_EXPR
, type
,
3021 invert_truthvalue (TREE_OPERAND (arg
, 0)),
3022 TREE_OPERAND (arg
, 1));
3024 case TRUTH_ANDIF_EXPR
:
3025 return build2 (TRUTH_ORIF_EXPR
, type
,
3026 invert_truthvalue (TREE_OPERAND (arg
, 0)),
3027 invert_truthvalue (TREE_OPERAND (arg
, 1)));
3029 case TRUTH_ORIF_EXPR
:
3030 return build2 (TRUTH_ANDIF_EXPR
, type
,
3031 invert_truthvalue (TREE_OPERAND (arg
, 0)),
3032 invert_truthvalue (TREE_OPERAND (arg
, 1)));
3034 case TRUTH_NOT_EXPR
:
3035 return TREE_OPERAND (arg
, 0);
3039 tree arg1
= TREE_OPERAND (arg
, 1);
3040 tree arg2
= TREE_OPERAND (arg
, 2);
3041 /* A COND_EXPR may have a throw as one operand, which
3042 then has void type. Just leave void operands
3044 return build3 (COND_EXPR
, type
, TREE_OPERAND (arg
, 0),
3045 VOID_TYPE_P (TREE_TYPE (arg1
))
3046 ? arg1
: invert_truthvalue (arg1
),
3047 VOID_TYPE_P (TREE_TYPE (arg2
))
3048 ? arg2
: invert_truthvalue (arg2
));
3052 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg
, 0),
3053 invert_truthvalue (TREE_OPERAND (arg
, 1)));
3055 case NON_LVALUE_EXPR
:
3056 return invert_truthvalue (TREE_OPERAND (arg
, 0));
3059 if (TREE_CODE (TREE_TYPE (arg
)) == BOOLEAN_TYPE
)
3064 return build1 (TREE_CODE (arg
), type
,
3065 invert_truthvalue (TREE_OPERAND (arg
, 0)));
3068 if (!integer_onep (TREE_OPERAND (arg
, 1)))
3070 return build2 (EQ_EXPR
, type
, arg
,
3071 fold_convert (type
, integer_zero_node
));
3074 return build1 (TRUTH_NOT_EXPR
, type
, arg
);
3076 case CLEANUP_POINT_EXPR
:
3077 return build1 (CLEANUP_POINT_EXPR
, type
,
3078 invert_truthvalue (TREE_OPERAND (arg
, 0)));
3083 gcc_assert (TREE_CODE (TREE_TYPE (arg
)) == BOOLEAN_TYPE
);
3084 return build1 (TRUTH_NOT_EXPR
, type
, arg
);
/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
	(A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */
3097 distribute_bit_expr (enum tree_code code
, tree type
, tree arg0
, tree arg1
)
3102 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
3103 || TREE_CODE (arg0
) == code
3104 || (TREE_CODE (arg0
) != BIT_AND_EXPR
3105 && TREE_CODE (arg0
) != BIT_IOR_EXPR
))
3108 if (operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0), 0))
3110 common
= TREE_OPERAND (arg0
, 0);
3111 left
= TREE_OPERAND (arg0
, 1);
3112 right
= TREE_OPERAND (arg1
, 1);
3114 else if (operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 1), 0))
3116 common
= TREE_OPERAND (arg0
, 0);
3117 left
= TREE_OPERAND (arg0
, 1);
3118 right
= TREE_OPERAND (arg1
, 0);
3120 else if (operand_equal_p (TREE_OPERAND (arg0
, 1), TREE_OPERAND (arg1
, 0), 0))
3122 common
= TREE_OPERAND (arg0
, 1);
3123 left
= TREE_OPERAND (arg0
, 0);
3124 right
= TREE_OPERAND (arg1
, 1);
3126 else if (operand_equal_p (TREE_OPERAND (arg0
, 1), TREE_OPERAND (arg1
, 1), 0))
3128 common
= TREE_OPERAND (arg0
, 1);
3129 left
= TREE_OPERAND (arg0
, 0);
3130 right
= TREE_OPERAND (arg1
, 0);
3135 return fold_build2 (TREE_CODE (arg0
), type
, common
,
3136 fold_build2 (code
, type
, left
, right
));
/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
   with code CODE.  This optimization is unsafe.  */
3142 distribute_real_division (enum tree_code code
, tree type
, tree arg0
, tree arg1
)
3144 bool mul0
= TREE_CODE (arg0
) == MULT_EXPR
;
3145 bool mul1
= TREE_CODE (arg1
) == MULT_EXPR
;
3147 /* (A / C) +- (B / C) -> (A +- B) / C. */
3149 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3150 TREE_OPERAND (arg1
, 1), 0))
3151 return fold_build2 (mul0
? MULT_EXPR
: RDIV_EXPR
, type
,
3152 fold_build2 (code
, type
,
3153 TREE_OPERAND (arg0
, 0),
3154 TREE_OPERAND (arg1
, 0)),
3155 TREE_OPERAND (arg0
, 1));
3157 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3158 if (operand_equal_p (TREE_OPERAND (arg0
, 0),
3159 TREE_OPERAND (arg1
, 0), 0)
3160 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
3161 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == REAL_CST
)
3163 REAL_VALUE_TYPE r0
, r1
;
3164 r0
= TREE_REAL_CST (TREE_OPERAND (arg0
, 1));
3165 r1
= TREE_REAL_CST (TREE_OPERAND (arg1
, 1));
3167 real_arithmetic (&r0
, RDIV_EXPR
, &dconst1
, &r0
);
3169 real_arithmetic (&r1
, RDIV_EXPR
, &dconst1
, &r1
);
3170 real_arithmetic (&r0
, code
, &r0
, &r1
);
3171 return fold_build2 (MULT_EXPR
, type
,
3172 TREE_OPERAND (arg0
, 0),
3173 build_real (type
, r0
));
3179 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3180 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3183 make_bit_field_ref (tree inner
, tree type
, int bitsize
, int bitpos
,
3190 tree size
= TYPE_SIZE (TREE_TYPE (inner
));
3191 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner
))
3192 || POINTER_TYPE_P (TREE_TYPE (inner
)))
3193 && host_integerp (size
, 0)
3194 && tree_low_cst (size
, 0) == bitsize
)
3195 return fold_convert (type
, inner
);
3198 result
= build3 (BIT_FIELD_REF
, type
, inner
,
3199 size_int (bitsize
), bitsize_int (bitpos
));
3201 BIT_FIELD_REF_UNSIGNED (result
) = unsignedp
;
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */
3227 optimize_bit_field_compare (enum tree_code code
, tree compare_type
,
3230 HOST_WIDE_INT lbitpos
, lbitsize
, rbitpos
, rbitsize
, nbitpos
, nbitsize
;
3231 tree type
= TREE_TYPE (lhs
);
3232 tree signed_type
, unsigned_type
;
3233 int const_p
= TREE_CODE (rhs
) == INTEGER_CST
;
3234 enum machine_mode lmode
, rmode
, nmode
;
3235 int lunsignedp
, runsignedp
;
3236 int lvolatilep
= 0, rvolatilep
= 0;
3237 tree linner
, rinner
= NULL_TREE
;
  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
3246 linner
= get_inner_reference (lhs
, &lbitsize
, &lbitpos
, &offset
, &lmode
,
3247 &lunsignedp
, &lvolatilep
, false);
3248 if (linner
== lhs
|| lbitsize
== GET_MODE_BITSIZE (lmode
) || lbitsize
< 0
3249 || offset
!= 0 || TREE_CODE (linner
) == PLACEHOLDER_EXPR
)
3254 /* If this is not a constant, we can only do something if bit positions,
3255 sizes, and signedness are the same. */
3256 rinner
= get_inner_reference (rhs
, &rbitsize
, &rbitpos
, &offset
, &rmode
,
3257 &runsignedp
, &rvolatilep
, false);
3259 if (rinner
== rhs
|| lbitpos
!= rbitpos
|| lbitsize
!= rbitsize
3260 || lunsignedp
!= runsignedp
|| offset
!= 0
3261 || TREE_CODE (rinner
) == PLACEHOLDER_EXPR
)
3265 /* See if we can find a mode to refer to this field. We should be able to,
3266 but fail if we can't. */
3267 nmode
= get_best_mode (lbitsize
, lbitpos
,
3268 const_p
? TYPE_ALIGN (TREE_TYPE (linner
))
3269 : MIN (TYPE_ALIGN (TREE_TYPE (linner
)),
3270 TYPE_ALIGN (TREE_TYPE (rinner
))),
3271 word_mode
, lvolatilep
|| rvolatilep
);
3272 if (nmode
== VOIDmode
)
3275 /* Set signed and unsigned types of the precision of this mode for the
3277 signed_type
= lang_hooks
.types
.type_for_mode (nmode
, 0);
3278 unsigned_type
= lang_hooks
.types
.type_for_mode (nmode
, 1);
3280 /* Compute the bit position and size for the new reference and our offset
3281 within it. If the new reference is the same size as the original, we
3282 won't optimize anything, so return zero. */
3283 nbitsize
= GET_MODE_BITSIZE (nmode
);
3284 nbitpos
= lbitpos
& ~ (nbitsize
- 1);
3286 if (nbitsize
== lbitsize
)
3289 if (BYTES_BIG_ENDIAN
)
3290 lbitpos
= nbitsize
- lbitsize
- lbitpos
;
3292 /* Make the mask to be used against the extracted field. */
3293 mask
= build_int_cst (unsigned_type
, -1);
3294 mask
= force_fit_type (mask
, 0, false, false);
3295 mask
= fold_convert (unsigned_type
, mask
);
3296 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (nbitsize
- lbitsize
), 0);
3297 mask
= const_binop (RSHIFT_EXPR
, mask
,
3298 size_int (nbitsize
- lbitsize
- lbitpos
), 0);
3301 /* If not comparing with constant, just rework the comparison
3303 return build2 (code
, compare_type
,
3304 build2 (BIT_AND_EXPR
, unsigned_type
,
3305 make_bit_field_ref (linner
, unsigned_type
,
3306 nbitsize
, nbitpos
, 1),
3308 build2 (BIT_AND_EXPR
, unsigned_type
,
3309 make_bit_field_ref (rinner
, unsigned_type
,
3310 nbitsize
, nbitpos
, 1),
  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */
3324 if (! integer_zerop (const_binop (RSHIFT_EXPR
,
3325 fold_convert (unsigned_type
, rhs
),
3326 size_int (lbitsize
), 0)))
3328 warning (0, "comparison is always %d due to width of bit-field",
3330 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
3335 tree tem
= const_binop (RSHIFT_EXPR
, fold_convert (signed_type
, rhs
),
3336 size_int (lbitsize
- 1), 0);
3337 if (! integer_zerop (tem
) && ! integer_all_onesp (tem
))
3339 warning (0, "comparison is always %d due to width of bit-field",
3341 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
3345 /* Single-bit compares should always be against zero. */
3346 if (lbitsize
== 1 && ! integer_zerop (rhs
))
3348 code
= code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
;
3349 rhs
= fold_convert (type
, integer_zero_node
);
3352 /* Make a new bitfield reference, shift the constant over the
3353 appropriate number of bits and mask it with the computed mask
3354 (in case this was a signed field). If we changed it, make a new one. */
3355 lhs
= make_bit_field_ref (linner
, unsigned_type
, nbitsize
, nbitpos
, 1);
3358 TREE_SIDE_EFFECTS (lhs
) = 1;
3359 TREE_THIS_VOLATILE (lhs
) = 1;
3362 rhs
= const_binop (BIT_AND_EXPR
,
3363 const_binop (LSHIFT_EXPR
,
3364 fold_convert (unsigned_type
, rhs
),
3365 size_int (lbitpos
), 0),
3368 return build2 (code
, compare_type
,
3369 build2 (BIT_AND_EXPR
, unsigned_type
, lhs
, mask
),
/* Subroutine for fold_truthop: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */
3397 decode_field_reference (tree exp
, HOST_WIDE_INT
*pbitsize
,
3398 HOST_WIDE_INT
*pbitpos
, enum machine_mode
*pmode
,
3399 int *punsignedp
, int *pvolatilep
,
3400 tree
*pmask
, tree
*pand_mask
)
3402 tree outer_type
= 0;
3404 tree mask
, inner
, offset
;
3406 unsigned int precision
;
3408 /* All the optimizations using this function assume integer fields.
3409 There are problems with FP fields since the type_for_size call
3410 below can fail for, e.g., XFmode. */
3411 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp
)))
3414 /* We are interested in the bare arrangement of bits, so strip everything
3415 that doesn't affect the machine mode. However, record the type of the
3416 outermost expression if it may matter below. */
3417 if (TREE_CODE (exp
) == NOP_EXPR
3418 || TREE_CODE (exp
) == CONVERT_EXPR
3419 || TREE_CODE (exp
) == NON_LVALUE_EXPR
)
3420 outer_type
= TREE_TYPE (exp
);
3423 if (TREE_CODE (exp
) == BIT_AND_EXPR
)
3425 and_mask
= TREE_OPERAND (exp
, 1);
3426 exp
= TREE_OPERAND (exp
, 0);
3427 STRIP_NOPS (exp
); STRIP_NOPS (and_mask
);
3428 if (TREE_CODE (and_mask
) != INTEGER_CST
)
3432 inner
= get_inner_reference (exp
, pbitsize
, pbitpos
, &offset
, pmode
,
3433 punsignedp
, pvolatilep
, false);
3434 if ((inner
== exp
&& and_mask
== 0)
3435 || *pbitsize
< 0 || offset
!= 0
3436 || TREE_CODE (inner
) == PLACEHOLDER_EXPR
)
3439 /* If the number of bits in the reference is the same as the bitsize of
3440 the outer type, then the outer type gives the signedness. Otherwise
3441 (in case of a small bitfield) the signedness is unchanged. */
3442 if (outer_type
&& *pbitsize
== TYPE_PRECISION (outer_type
))
3443 *punsignedp
= TYPE_UNSIGNED (outer_type
);
3445 /* Compute the mask to access the bitfield. */
3446 unsigned_type
= lang_hooks
.types
.type_for_size (*pbitsize
, 1);
3447 precision
= TYPE_PRECISION (unsigned_type
);
3449 mask
= build_int_cst (unsigned_type
, -1);
3450 mask
= force_fit_type (mask
, 0, false, false);
3452 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
), 0);
3453 mask
= const_binop (RSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
), 0);
3455 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3457 mask
= fold_build2 (BIT_AND_EXPR
, unsigned_type
,
3458 fold_convert (unsigned_type
, and_mask
), mask
);
3461 *pand_mask
= and_mask
;
/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bits.  */

static int
all_ones_mask_p (tree mask, int size)
3471 tree type
= TREE_TYPE (mask
);
3472 unsigned int precision
= TYPE_PRECISION (type
);
3475 tmask
= build_int_cst (lang_hooks
.types
.signed_type (type
), -1);
3476 tmask
= force_fit_type (tmask
, 0, false, false);
3479 tree_int_cst_equal (mask
,
3480 const_binop (RSHIFT_EXPR
,
3481 const_binop (LSHIFT_EXPR
, tmask
,
3482 size_int (precision
- size
),
3484 size_int (precision
- size
), 0));
/* Subroutine for fold: determine if VAL is the INTEGER_CST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, tree val)
3496 unsigned HOST_WIDE_INT mask_lo
, lo
;
3497 HOST_WIDE_INT mask_hi
, hi
;
3501 /* Tree EXP must have an integral type. */
3502 t
= TREE_TYPE (exp
);
3503 if (! INTEGRAL_TYPE_P (t
))
3506 /* Tree VAL must be an integer constant. */
3507 if (TREE_CODE (val
) != INTEGER_CST
3508 || TREE_CONSTANT_OVERFLOW (val
))
3511 width
= TYPE_PRECISION (t
);
3512 if (width
> HOST_BITS_PER_WIDE_INT
)
3514 hi
= (unsigned HOST_WIDE_INT
) 1 << (width
- HOST_BITS_PER_WIDE_INT
- 1);
3517 mask_hi
= ((unsigned HOST_WIDE_INT
) -1
3518 >> (2 * HOST_BITS_PER_WIDE_INT
- width
));
3524 lo
= (unsigned HOST_WIDE_INT
) 1 << (width
- 1);
3527 mask_lo
= ((unsigned HOST_WIDE_INT
) -1
3528 >> (HOST_BITS_PER_WIDE_INT
- width
));
3531 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3532 treat VAL as if it were unsigned. */
3533 if ((TREE_INT_CST_HIGH (val
) & mask_hi
) == hi
3534 && (TREE_INT_CST_LOW (val
) & mask_lo
) == lo
)
3537 /* Handle extension from a narrower type. */
3538 if (TREE_CODE (exp
) == NOP_EXPR
3539 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))) < width
)
3540 return sign_bit_p (TREE_OPERAND (exp
, 0), val
);
/* Subroutine for fold_truthop: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (tree exp)
3551 /* Strip any conversions that don't change the machine mode. */
3554 return (CONSTANT_CLASS_P (exp
)
3555 || TREE_CODE (exp
) == SSA_NAME
3557 && ! TREE_ADDRESSABLE (exp
)
3558 && ! TREE_THIS_VOLATILE (exp
)
3559 && ! DECL_NONLOCAL (exp
)
3560 /* Don't regard global variables as simple. They may be
3561 allocated in ways unknown to the compiler (shared memory,
3562 #pragma weak, etc). */
3563 && ! TREE_PUBLIC (exp
)
3564 && ! DECL_EXTERNAL (exp
)
3565 /* Loading a static variable is unduly expensive, but global
3566 registers aren't expensive. */
3567 && (! TREE_STATIC (exp
) || DECL_REGISTER (exp
))));
/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example,
	X == 2 || X == 3 || X == 4 || X == 5
   can be converted to
	(unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
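/* Illustrative sketch only: the range-test idiom described above, assuming
   a 32-bit int.  Subtracting the lower bound shifts the accepted set down to
   start at zero, and a single unsigned comparison then tests membership;
   values below the lower bound wrap around to large unsigned values and are
   rejected by the same test.  */
static int range_test_before (int x)
{
  return x == 2 || x == 3 || x == 4 || x == 5;
}

static int range_test_after (int x)
{
  return ((unsigned) x - 2u) <= 3u;
}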
/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
	     tree arg1, int upper1_p)
3613 /* If neither arg represents infinity, do the normal operation.
3614 Else, if not a comparison, return infinity. Else handle the special
3615 comparison rules. Note that most of the cases below won't occur, but
3616 are handled for consistency. */
3618 if (arg0
!= 0 && arg1
!= 0)
3620 tem
= fold_build2 (code
, type
!= 0 ? type
: TREE_TYPE (arg0
),
3621 arg0
, fold_convert (TREE_TYPE (arg0
), arg1
));
3623 return TREE_CODE (tem
) == INTEGER_CST
? tem
: 0;
3626 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
3629 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3630 for neither. In real maths, we cannot assume open ended ranges are
3631 the same. But, this is computer arithmetic, where numbers are finite.
3632 We can therefore make the transformation of any unbounded range with
3633 the value Z, Z being greater than any representable number. This permits
3634 us to treat unbounded ranges as equal. */
3635 sgn0
= arg0
!= 0 ? 0 : (upper0_p
? 1 : -1);
3636 sgn1
= arg1
!= 0 ? 0 : (upper1_p
? 1 : -1);
3640 result
= sgn0
== sgn1
;
3643 result
= sgn0
!= sgn1
;
3646 result
= sgn0
< sgn1
;
3649 result
= sgn0
<= sgn1
;
3652 result
= sgn0
> sgn1
;
3655 result
= sgn0
>= sgn1
;
3661 return constant_boolean_node (result
, type
);
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same type
   as the returned expression.  If EXP is not a comparison, we will most
   likely not be returning a useful value and range.  */

static tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3673 enum tree_code code
;
3674 tree arg0
= NULL_TREE
, arg1
= NULL_TREE
;
3675 tree exp_type
= NULL_TREE
, arg0_type
= NULL_TREE
;
3677 tree low
, high
, n_low
, n_high
;
3679 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3680 and see if we can refine the range. Some of the cases below may not
3681 happen, but it doesn't seem worth worrying about this. We "continue"
3682 the outer loop when we've changed something; otherwise we "break"
3683 the switch, which will "break" the while. */
3686 low
= high
= fold_convert (TREE_TYPE (exp
), integer_zero_node
);
3690 code
= TREE_CODE (exp
);
3691 exp_type
= TREE_TYPE (exp
);
3693 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
)))
3695 if (TREE_CODE_LENGTH (code
) > 0)
3696 arg0
= TREE_OPERAND (exp
, 0);
3697 if (TREE_CODE_CLASS (code
) == tcc_comparison
3698 || TREE_CODE_CLASS (code
) == tcc_unary
3699 || TREE_CODE_CLASS (code
) == tcc_binary
)
3700 arg0_type
= TREE_TYPE (arg0
);
3701 if (TREE_CODE_CLASS (code
) == tcc_binary
3702 || TREE_CODE_CLASS (code
) == tcc_comparison
3703 || (TREE_CODE_CLASS (code
) == tcc_expression
3704 && TREE_CODE_LENGTH (code
) > 1))
3705 arg1
= TREE_OPERAND (exp
, 1);
3710 case TRUTH_NOT_EXPR
:
3711 in_p
= ! in_p
, exp
= arg0
;
3714 case EQ_EXPR
: case NE_EXPR
:
3715 case LT_EXPR
: case LE_EXPR
: case GE_EXPR
: case GT_EXPR
:
3716 /* We can only do something if the range is testing for zero
3717 and if the second operand is an integer constant. Note that
3718 saying something is "in" the range we make is done by
3719 complementing IN_P since it will set in the initial case of
3720 being not equal to zero; "out" is leaving it alone. */
3721 if (low
== 0 || high
== 0
3722 || ! integer_zerop (low
) || ! integer_zerop (high
)
3723 || TREE_CODE (arg1
) != INTEGER_CST
)
3728 case NE_EXPR
: /* - [c, c] */
3731 case EQ_EXPR
: /* + [c, c] */
3732 in_p
= ! in_p
, low
= high
= arg1
;
3734 case GT_EXPR
: /* - [-, c] */
3735 low
= 0, high
= arg1
;
3737 case GE_EXPR
: /* + [c, -] */
3738 in_p
= ! in_p
, low
= arg1
, high
= 0;
3740 case LT_EXPR
: /* - [c, -] */
3741 low
= arg1
, high
= 0;
3743 case LE_EXPR
: /* + [-, c] */
3744 in_p
= ! in_p
, low
= 0, high
= arg1
;
3750 /* If this is an unsigned comparison, we also know that EXP is
3751 greater than or equal to zero. We base the range tests we make
3752 on that fact, so we record it here so we can parse existing
3753 range tests. We test arg0_type since often the return type
3754 of, e.g. EQ_EXPR, is boolean. */
3755 if (TYPE_UNSIGNED (arg0_type
) && (low
== 0 || high
== 0))
3757 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
,
3759 fold_convert (arg0_type
, integer_zero_node
),
3763 in_p
= n_in_p
, low
= n_low
, high
= n_high
;
3765 /* If the high bound is missing, but we have a nonzero low
3766 bound, reverse the range so it goes from zero to the low bound
3768 if (high
== 0 && low
&& ! integer_zerop (low
))
3771 high
= range_binop (MINUS_EXPR
, NULL_TREE
, low
, 0,
3772 integer_one_node
, 0);
3773 low
= fold_convert (arg0_type
, integer_zero_node
);
3781 /* (-x) IN [a,b] -> x in [-b, -a] */
3782 n_low
= range_binop (MINUS_EXPR
, exp_type
,
3783 fold_convert (exp_type
, integer_zero_node
),
3785 n_high
= range_binop (MINUS_EXPR
, exp_type
,
3786 fold_convert (exp_type
, integer_zero_node
),
3788 low
= n_low
, high
= n_high
;
3794 exp
= build2 (MINUS_EXPR
, exp_type
, negate_expr (arg0
),
3795 fold_convert (exp_type
, integer_one_node
));
3798 case PLUS_EXPR
: case MINUS_EXPR
:
3799 if (TREE_CODE (arg1
) != INTEGER_CST
)
3802 /* If EXP is signed, any overflow in the computation is undefined,
3803 so we don't worry about it so long as our computations on
3804 the bounds don't overflow. For unsigned, overflow is defined
3805 and this is exactly the right thing. */
3806 n_low
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
3807 arg0_type
, low
, 0, arg1
, 0);
3808 n_high
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
3809 arg0_type
, high
, 1, arg1
, 0);
3810 if ((n_low
!= 0 && TREE_OVERFLOW (n_low
))
3811 || (n_high
!= 0 && TREE_OVERFLOW (n_high
)))
3814 /* Check for an unsigned range which has wrapped around the maximum
3815 value thus making n_high < n_low, and normalize it. */
3816 if (n_low
&& n_high
&& tree_int_cst_lt (n_high
, n_low
))
3818 low
= range_binop (PLUS_EXPR
, arg0_type
, n_high
, 0,
3819 integer_one_node
, 0);
3820 high
= range_binop (MINUS_EXPR
, arg0_type
, n_low
, 0,
3821 integer_one_node
, 0);
3823 /* If the range is of the form +/- [ x+1, x ], we won't
3824 be able to normalize it. But then, it represents the
3825 whole range or the empty set, so make it
3827 if (tree_int_cst_equal (n_low
, low
)
3828 && tree_int_cst_equal (n_high
, high
))
3834 low
= n_low
, high
= n_high
;
3839 case NOP_EXPR
: case NON_LVALUE_EXPR
: case CONVERT_EXPR
:
3840 if (TYPE_PRECISION (arg0_type
) > TYPE_PRECISION (exp_type
))
3843 if (! INTEGRAL_TYPE_P (arg0_type
)
3844 || (low
!= 0 && ! int_fits_type_p (low
, arg0_type
))
3845 || (high
!= 0 && ! int_fits_type_p (high
, arg0_type
)))
3848 n_low
= low
, n_high
= high
;
3851 n_low
= fold_convert (arg0_type
, n_low
);
3854 n_high
= fold_convert (arg0_type
, n_high
);
3857 /* If we're converting arg0 from an unsigned type, to exp,
3858 a signed type, we will be doing the comparison as unsigned.
3859 The tests above have already verified that LOW and HIGH
3862 So we have to ensure that we will handle large unsigned
3863 values the same way that the current signed bounds treat
3866 if (!TYPE_UNSIGNED (exp_type
) && TYPE_UNSIGNED (arg0_type
))
3869 tree equiv_type
= lang_hooks
.types
.type_for_mode
3870 (TYPE_MODE (arg0_type
), 1);
3872 /* A range without an upper bound is, naturally, unbounded.
3873 Since convert would have cropped a very large value, use
3874 the max value for the destination type. */
3876 = TYPE_MAX_VALUE (equiv_type
) ? TYPE_MAX_VALUE (equiv_type
)
3877 : TYPE_MAX_VALUE (arg0_type
);
3879 if (TYPE_PRECISION (exp_type
) == TYPE_PRECISION (arg0_type
))
3880 high_positive
= fold_build2 (RSHIFT_EXPR
, arg0_type
,
3881 fold_convert (arg0_type
,
3883 fold_convert (arg0_type
,
3886 /* If the low bound is specified, "and" the range with the
3887 range for which the original unsigned value will be
3891 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
,
3892 1, n_low
, n_high
, 1,
3893 fold_convert (arg0_type
,
3898 in_p
= (n_in_p
== in_p
);
3902 /* Otherwise, "or" the range with the range of the input
3903 that will be interpreted as negative. */
3904 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
,
3905 0, n_low
, n_high
, 1,
3906 fold_convert (arg0_type
,
3911 in_p
= (in_p
!= n_in_p
);
3916 low
= n_low
, high
= n_high
;
3926 /* If EXP is a constant, we can evaluate whether this is true or false. */
3927 if (TREE_CODE (exp
) == INTEGER_CST
)
3929 in_p
= in_p
== (integer_onep (range_binop (GE_EXPR
, integer_type_node
,
3931 && integer_onep (range_binop (LE_EXPR
, integer_type_node
,
3937 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.  */

tree
build_range_check (tree type, tree exp, int in_p, tree low, tree high)
{
  tree etype = TREE_TYPE (exp);
3951 #ifdef HAVE_canonicalize_funcptr_for_compare
3952 /* Disable this optimization for function pointer expressions
3953 on targets that require function pointer canonicalization. */
3954 if (HAVE_canonicalize_funcptr_for_compare
3955 && TREE_CODE (etype
) == POINTER_TYPE
3956 && TREE_CODE (TREE_TYPE (etype
)) == FUNCTION_TYPE
)
3962 value
= build_range_check (type
, exp
, 1, low
, high
);
3964 return invert_truthvalue (value
);
3969 if (low
== 0 && high
== 0)
3970 return fold_convert (type
, integer_one_node
);
3973 return fold_build2 (LE_EXPR
, type
, exp
,
3974 fold_convert (etype
, high
));
3977 return fold_build2 (GE_EXPR
, type
, exp
,
3978 fold_convert (etype
, low
));
3980 if (operand_equal_p (low
, high
, 0))
3981 return fold_build2 (EQ_EXPR
, type
, exp
,
3982 fold_convert (etype
, low
));
3984 if (integer_zerop (low
))
3986 if (! TYPE_UNSIGNED (etype
))
3988 etype
= lang_hooks
.types
.unsigned_type (etype
);
3989 high
= fold_convert (etype
, high
);
3990 exp
= fold_convert (etype
, exp
);
3992 return build_range_check (type
, exp
, 1, 0, high
);
3995 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3996 if (integer_onep (low
) && TREE_CODE (high
) == INTEGER_CST
)
3998 unsigned HOST_WIDE_INT lo
;
4002 prec
= TYPE_PRECISION (etype
);
4003 if (prec
<= HOST_BITS_PER_WIDE_INT
)
4006 lo
= ((unsigned HOST_WIDE_INT
) 1 << (prec
- 1)) - 1;
4010 hi
= ((HOST_WIDE_INT
) 1 << (prec
- HOST_BITS_PER_WIDE_INT
- 1)) - 1;
4011 lo
= (unsigned HOST_WIDE_INT
) -1;
4014 if (TREE_INT_CST_HIGH (high
) == hi
&& TREE_INT_CST_LOW (high
) == lo
)
4016 if (TYPE_UNSIGNED (etype
))
4018 etype
= lang_hooks
.types
.signed_type (etype
);
4019 exp
= fold_convert (etype
, exp
);
4021 return fold_build2 (GT_EXPR
, type
, exp
,
4022 fold_convert (etype
, integer_zero_node
));
4026 value
= const_binop (MINUS_EXPR
, high
, low
, 0);
4027 if (value
!= 0 && (!flag_wrapv
|| TREE_OVERFLOW (value
))
4028 && ! TYPE_UNSIGNED (etype
))
4030 tree utype
, minv
, maxv
;
4032 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4033 for the type in question, as we rely on this here. */
4034 switch (TREE_CODE (etype
))
4039 /* There is no requirement that LOW be within the range of ETYPE
4040 if the latter is a subtype. It must, however, be within the base
4041 type of ETYPE. So be sure we do the subtraction in that type. */
4042 if (TREE_TYPE (etype
))
4043 etype
= TREE_TYPE (etype
);
4044 utype
= lang_hooks
.types
.unsigned_type (etype
);
4045 maxv
= fold_convert (utype
, TYPE_MAX_VALUE (etype
));
4046 maxv
= range_binop (PLUS_EXPR
, NULL_TREE
, maxv
, 1,
4047 integer_one_node
, 1);
4048 minv
= fold_convert (utype
, TYPE_MIN_VALUE (etype
));
4049 if (integer_zerop (range_binop (NE_EXPR
, integer_type_node
,
4053 high
= fold_convert (etype
, high
);
4054 low
= fold_convert (etype
, low
);
4055 exp
= fold_convert (etype
, exp
);
4056 value
= const_binop (MINUS_EXPR
, high
, low
, 0);
4064 if (value
!= 0 && ! TREE_OVERFLOW (value
))
4066 /* There is no requirement that LOW be within the range of ETYPE
4067 if the latter is a subtype. It must, however, be within the base
4068 type of ETYPE. So be sure we do the subtraction in that type. */
4069 if (INTEGRAL_TYPE_P (etype
) && TREE_TYPE (etype
))
4071 etype
= TREE_TYPE (etype
);
4072 exp
= fold_convert (etype
, exp
);
4073 low
= fold_convert (etype
, low
);
4074 value
= fold_convert (etype
, value
);
4077 return build_range_check (type
,
4078 fold_build2 (MINUS_EXPR
, etype
, exp
, low
),
4079 1, build_int_cst (etype
, 0), value
);
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

int
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
	      tree high0, int in1_p, tree low1, tree high1)
4098 int lowequal
= ((low0
== 0 && low1
== 0)
4099 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4100 low0
, 0, low1
, 0)));
4101 int highequal
= ((high0
== 0 && high1
== 0)
4102 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4103 high0
, 1, high1
, 1)));
4105 /* Make range 0 be the range that starts first, or ends last if they
4106 start at the same value. Swap them if it isn't. */
4107 if (integer_onep (range_binop (GT_EXPR
, integer_type_node
,
4110 && integer_onep (range_binop (GT_EXPR
, integer_type_node
,
4111 high1
, 1, high0
, 1))))
4113 temp
= in0_p
, in0_p
= in1_p
, in1_p
= temp
;
4114 tem
= low0
, low0
= low1
, low1
= tem
;
4115 tem
= high0
, high0
= high1
, high1
= tem
;
4118 /* Now flag two cases, whether the ranges are disjoint or whether the
4119 second range is totally subsumed in the first. Note that the tests
4120 below are simplified by the ones above. */
4121 no_overlap
= integer_onep (range_binop (LT_EXPR
, integer_type_node
,
4122 high0
, 1, low1
, 0));
4123 subset
= integer_onep (range_binop (LE_EXPR
, integer_type_node
,
4124 high1
, 1, high0
, 1));
4126 /* We now have four cases, depending on whether we are including or
4127 excluding the two ranges. */
4130 /* If they don't overlap, the result is false. If the second range
4131 is a subset it is the result. Otherwise, the range is from the start
4132 of the second to the end of the first. */
4134 in_p
= 0, low
= high
= 0;
4136 in_p
= 1, low
= low1
, high
= high1
;
4138 in_p
= 1, low
= low1
, high
= high0
;
4141 else if (in0_p
&& ! in1_p
)
4143 /* If they don't overlap, the result is the first range. If they are
4144 equal, the result is false. If the second range is a subset of the
4145 first, and the ranges begin at the same place, we go from just after
4146 the end of the first range to the end of the second. If the second
4147 range is not a subset of the first, or if it is a subset and both
4148 ranges end at the same place, the range starts at the start of the
4149 first range and ends just before the second range.
4150 Otherwise, we can't describe this as a single range. */
4152 in_p
= 1, low
= low0
, high
= high0
;
4153 else if (lowequal
&& highequal
)
4154 in_p
= 0, low
= high
= 0;
4155 else if (subset
&& lowequal
)
4157 in_p
= 1, high
= high0
;
4158 low
= range_binop (PLUS_EXPR
, NULL_TREE
, high1
, 0,
4159 integer_one_node
, 0);
4161 else if (! subset
|| highequal
)
4163 in_p
= 1, low
= low0
;
4164 high
= range_binop (MINUS_EXPR
, NULL_TREE
, low1
, 0,
4165 integer_one_node
, 0);
4171 else if (! in0_p
&& in1_p
)
4173 /* If they don't overlap, the result is the second range. If the second
4174 is a subset of the first, the result is false. Otherwise,
4175 the range starts just after the first range and ends at the
4176 end of the second. */
4178 in_p
= 1, low
= low1
, high
= high1
;
4179 else if (subset
|| highequal
)
4180 in_p
= 0, low
= high
= 0;
4183 in_p
= 1, high
= high1
;
4184 low
= range_binop (PLUS_EXPR
, NULL_TREE
, high0
, 1,
4185 integer_one_node
, 0);
4191 /* The case where we are excluding both ranges. Here the complex case
4192 is if they don't overlap. In that case, the only time we have a
4193 range is if they are adjacent. If the second is a subset of the
4194 first, the result is the first. Otherwise, the range to exclude
4195 starts at the beginning of the first range and ends at the end of the
4199 if (integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4200 range_binop (PLUS_EXPR
, NULL_TREE
,
4202 integer_one_node
, 1),
4204 in_p
= 0, low
= low0
, high
= high1
;
4207 /* Canonicalize - [min, x] into - [-, x]. */
4208 if (low0
&& TREE_CODE (low0
) == INTEGER_CST
)
4209 switch (TREE_CODE (TREE_TYPE (low0
)))
4212 if (TYPE_PRECISION (TREE_TYPE (low0
))
4213 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0
))))
4218 if (tree_int_cst_equal (low0
,
4219 TYPE_MIN_VALUE (TREE_TYPE (low0
))))
4223 if (TYPE_UNSIGNED (TREE_TYPE (low0
))
4224 && integer_zerop (low0
))
4231 /* Canonicalize - [x, max] into - [x, -]. */
4232 if (high1
&& TREE_CODE (high1
) == INTEGER_CST
)
4233 switch (TREE_CODE (TREE_TYPE (high1
)))
4236 if (TYPE_PRECISION (TREE_TYPE (high1
))
4237 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1
))))
4242 if (tree_int_cst_equal (high1
,
4243 TYPE_MAX_VALUE (TREE_TYPE (high1
))))
4247 if (TYPE_UNSIGNED (TREE_TYPE (high1
))
4248 && integer_zerop (range_binop (PLUS_EXPR
, NULL_TREE
,
4250 integer_one_node
, 1)))
4257 /* The ranges might be also adjacent between the maximum and
4258 minimum values of the given type. For
4259 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4260 return + [x + 1, y - 1]. */
4261 if (low0
== 0 && high1
== 0)
4263 low
= range_binop (PLUS_EXPR
, NULL_TREE
, high0
, 1,
4264 integer_one_node
, 1);
4265 high
= range_binop (MINUS_EXPR
, NULL_TREE
, low1
, 0,
4266 integer_one_node
, 0);
4267 if (low
== 0 || high
== 0)
4277 in_p
= 0, low
= low0
, high
= high0
;
4279 in_p
= 0, low
= low0
, high
= high1
;
4282 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is being used also to optimize
   A op B ? C : A, by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

static tree
fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4298 enum tree_code comp_code
= TREE_CODE (arg0
);
4299 tree arg00
= TREE_OPERAND (arg0
, 0);
4300 tree arg01
= TREE_OPERAND (arg0
, 1);
4301 tree arg1_type
= TREE_TYPE (arg1
);
4307 /* If we have A op 0 ? A : -A, consider applying the following
4310 A == 0? A : -A same as -A
4311 A != 0? A : -A same as A
4312 A >= 0? A : -A same as abs (A)
4313 A > 0? A : -A same as abs (A)
4314 A <= 0? A : -A same as -abs (A)
4315 A < 0? A : -A same as -abs (A)
4317 None of these transformations work for modes with signed
4318 zeros. If A is +/-0, the first two transformations will
4319 change the sign of the result (from +0 to -0, or vice
4320 versa). The last four will fix the sign of the result,
4321 even though the original expressions could be positive or
4322 negative, depending on the sign of A.
4324 Note that all these transformations are correct if A is
4325 NaN, since the two alternatives (A and -A) are also NaNs. */
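/* For instance (illustrative only): with A of floating type, signed
   zeros not honored and trapping math aside, "a >= 0.0 ? a : -a" folds
   to ABS_EXPR <a> and "a <= 0.0 ? a : -a" to -ABS_EXPR <a>, exactly as
   tabulated above.  */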
4326 if ((FLOAT_TYPE_P (TREE_TYPE (arg01
))
4327 ? real_zerop (arg01
)
4328 : integer_zerop (arg01
))
4329 && ((TREE_CODE (arg2
) == NEGATE_EXPR
4330 && operand_equal_p (TREE_OPERAND (arg2
, 0), arg1
, 0))
4331 /* In the case that A is of the form X-Y, '-A' (arg2) may
4332 have already been folded to Y-X, check for that. */
4333 || (TREE_CODE (arg1
) == MINUS_EXPR
4334 && TREE_CODE (arg2
) == MINUS_EXPR
4335 && operand_equal_p (TREE_OPERAND (arg1
, 0),
4336 TREE_OPERAND (arg2
, 1), 0)
4337 && operand_equal_p (TREE_OPERAND (arg1
, 1),
4338 TREE_OPERAND (arg2
, 0), 0))))
4343 tem
= fold_convert (arg1_type
, arg1
);
4344 return pedantic_non_lvalue (fold_convert (type
, negate_expr (tem
)));
4347 return pedantic_non_lvalue (fold_convert (type
, arg1
));
4350 if (flag_trapping_math
)
4355 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
4356 arg1
= fold_convert (lang_hooks
.types
.signed_type
4357 (TREE_TYPE (arg1
)), arg1
);
4358 tem
= fold_build1 (ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
4359 return pedantic_non_lvalue (fold_convert (type
, tem
));
4362 if (flag_trapping_math
)
4366 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
4367 arg1
= fold_convert (lang_hooks
.types
.signed_type
4368 (TREE_TYPE (arg1
)), arg1
);
4369 tem
= fold_build1 (ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
4370 return negate_expr (fold_convert (type
, tem
));
4372 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
4376 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4377 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4378 both transformations are correct when A is NaN: A != 0
4379 is then true, and A == 0 is false. */
4381 if (integer_zerop (arg01
) && integer_zerop (arg2
))
4383 if (comp_code
== NE_EXPR
)
4384 return pedantic_non_lvalue (fold_convert (type
, arg1
));
4385 else if (comp_code
== EQ_EXPR
)
4386 return fold_convert (type
, integer_zero_node
);
4389 /* Try some transformations of A op B ? A : B.
4391 A == B? A : B same as B
4392 A != B? A : B same as A
4393 A >= B? A : B same as max (A, B)
4394 A > B? A : B same as max (B, A)
4395 A <= B? A : B same as min (A, B)
4396 A < B? A : B same as min (B, A)
4398 As above, these transformations don't work in the presence
4399 of signed zeros. For example, if A and B are zeros of
4400 opposite sign, the first two transformations will change
4401 the sign of the result. In the last four, the original
4402 expressions give different results for (A=+0, B=-0) and
4403 (A=-0, B=+0), but the transformed expressions do not.
4405 The first two transformations are correct if either A or B
4406 is a NaN. In the first transformation, the condition will
4407 be false, and B will indeed be chosen. In the case of the
4408 second transformation, the condition A != B will be true,
4409 and A will be chosen.
4411 The conversions to max() and min() are not correct if B is
4412 a number and A is not. The conditions in the original
4413 expressions will be false, so all four give B. The min()
4414 and max() versions would give a NaN instead. */
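/* Concretely (an illustrative case, not an additional rule): for
   "a < b ? a : b" the MIN_EXPR <b, a> rewrite below is valid only when
   NaNs need not be honored; if a is a NaN the original expression
   yields b, whereas the MIN_EXPR could yield the NaN.  */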
4415 if (operand_equal_for_comparison_p (arg01
, arg2
, arg00
)
4416 /* Avoid these transformations if the COND_EXPR may be used
4417 as an lvalue in the C++ front-end. PR c++/19199. */
4419 || strcmp (lang_hooks
.name
, "GNU C++") != 0
4420 || ! maybe_lvalue_p (arg1
)
4421 || ! maybe_lvalue_p (arg2
)))
4423 tree comp_op0
= arg00
;
4424 tree comp_op1
= arg01
;
4425 tree comp_type
= TREE_TYPE (comp_op0
);
4427 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4428 if (TYPE_MAIN_VARIANT (comp_type
) == TYPE_MAIN_VARIANT (type
))
4438 return pedantic_non_lvalue (fold_convert (type
, arg2
));
4440 return pedantic_non_lvalue (fold_convert (type
, arg1
));
4445 /* In C++ a ?: expression can be an lvalue, so put the
4446 operand which will be used if they are equal first
4447 so that we can convert this back to the
4448 corresponding COND_EXPR. */
4449 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4451 comp_op0
= fold_convert (comp_type
, comp_op0
);
4452 comp_op1
= fold_convert (comp_type
, comp_op1
);
4453 tem
= (comp_code
== LE_EXPR
|| comp_code
== UNLE_EXPR
)
4454 ? fold_build2 (MIN_EXPR
, comp_type
, comp_op0
, comp_op1
)
4455 : fold_build2 (MIN_EXPR
, comp_type
, comp_op1
, comp_op0
);
4456 return pedantic_non_lvalue (fold_convert (type
, tem
));
4463 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4465 comp_op0
= fold_convert (comp_type
, comp_op0
);
4466 comp_op1
= fold_convert (comp_type
, comp_op1
);
4467 tem
= (comp_code
== GE_EXPR
|| comp_code
== UNGE_EXPR
)
4468 ? fold_build2 (MAX_EXPR
, comp_type
, comp_op0
, comp_op1
)
4469 : fold_build2 (MAX_EXPR
, comp_type
, comp_op1
, comp_op0
);
4470 return pedantic_non_lvalue (fold_convert (type
, tem
));
4474 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4475 return pedantic_non_lvalue (fold_convert (type
, arg2
));
4478 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4479 return pedantic_non_lvalue (fold_convert (type
, arg1
));
4482 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
4487 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4488 we might still be able to simplify this. For example,
4489 if C1 is one less or one more than C2, this might have started
4490 out as a MIN or MAX and been transformed by this function.
4491 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
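/* An illustrative instance (the constants are only examples): for
   integral x, "x < 100 ? x : 99" has C1 == C2 + 1 and is recognized
   below as MIN_EXPR <x, 99>, while "x > 99 ? x : 100" has C1 == C2 - 1
   and becomes MAX_EXPR <x, 100>.  */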
4493 if (INTEGRAL_TYPE_P (type
)
4494 && TREE_CODE (arg01
) == INTEGER_CST
4495 && TREE_CODE (arg2
) == INTEGER_CST
)
4499 /* We can replace A with C1 in this case. */
4500 arg1
= fold_convert (type
, arg01
);
4501 return fold_build3 (COND_EXPR
, type
, arg0
, arg1
, arg2
);
4504 /* If C1 is C2 + 1, this is min(A, C2). */
4505 if (! operand_equal_p (arg2
, TYPE_MAX_VALUE (type
),
4507 && operand_equal_p (arg01
,
4508 const_binop (PLUS_EXPR
, arg2
,
4509 integer_one_node
, 0),
4511 return pedantic_non_lvalue (fold_build2 (MIN_EXPR
,
4516 /* If C1 is C2 - 1, this is min(A, C2). */
4517 if (! operand_equal_p (arg2
, TYPE_MIN_VALUE (type
),
4519 && operand_equal_p (arg01
,
4520 const_binop (MINUS_EXPR
, arg2
,
4521 integer_one_node
, 0),
4523 return pedantic_non_lvalue (fold_build2 (MIN_EXPR
,
4528 /* If C1 is C2 - 1, this is max(A, C2). */
4529 if (! operand_equal_p (arg2
, TYPE_MIN_VALUE (type
),
4531 && operand_equal_p (arg01
,
4532 const_binop (MINUS_EXPR
, arg2
,
4533 integer_one_node
, 0),
4535 return pedantic_non_lvalue (fold_build2 (MAX_EXPR
,
4540 /* If C1 is C2 + 1, this is max(A, C2). */
4541 if (! operand_equal_p (arg2
, TYPE_MAX_VALUE (type
),
4543 && operand_equal_p (arg01
,
4544 const_binop (PLUS_EXPR
, arg2
,
4545 integer_one_node
, 0),
4547 return pedantic_non_lvalue (fold_build2 (MAX_EXPR
,
4561 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4562 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4565 /* EXP is some logical combination of boolean tests. See if we can
4566 merge it into some range test. Return the new tree if so. */
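/* As an illustration (the exact form of the result is only a sketch):
   for "ch >= '0' && ch <= '9'" both operands describe ranges of the
   same variable, so merge_ranges combines them into one range and
   build_range_check can emit a single test, roughly
   (unsigned char) (ch - '0') <= 9.  */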
fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
  int or_op = (code == TRUTH_ORIF_EXPR
               || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  tree lhs = make_range (op0, &in0_p, &low0, &high0);
  tree rhs = make_range (op1, &in1_p, &low1, &high1);
4579 /* If this is an OR operation, invert both sides; we will invert
4580 again at the end. */
4582 in0_p
= ! in0_p
, in1_p
= ! in1_p
;
4584 /* If both expressions are the same, if we can merge the ranges, and we
4585 can build the range test, return it or it inverted. If one of the
4586 ranges is always true or always false, consider it to be the same
4587 expression as the other. */
4588 if ((lhs
== 0 || rhs
== 0 || operand_equal_p (lhs
, rhs
, 0))
4589 && merge_ranges (&in_p
, &low
, &high
, in0_p
, low0
, high0
,
4591 && 0 != (tem
= (build_range_check (type
,
4593 : rhs
!= 0 ? rhs
: integer_zero_node
,
4595 return or_op
? invert_truthvalue (tem
) : tem
;
4597 /* On machines where the branch cost is expensive, if this is a
4598 short-circuited branch and the underlying object on both sides
4599 is the same, make a non-short-circuit operation. */
4600 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4601 && lhs
!= 0 && rhs
!= 0
4602 && (code
== TRUTH_ANDIF_EXPR
4603 || code
== TRUTH_ORIF_EXPR
)
4604 && operand_equal_p (lhs
, rhs
, 0))
4606 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4607 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4608 which cases we can't do this. */
4609 if (simple_operand_p (lhs
))
4610 return build2 (code
== TRUTH_ANDIF_EXPR
4611 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
4614 else if (lang_hooks
.decls
.global_bindings_p () == 0
4615 && ! CONTAINS_PLACEHOLDER_P (lhs
))
4617 tree common
= save_expr (lhs
);
4619 if (0 != (lhs
= build_range_check (type
, common
,
4620 or_op
? ! in0_p
: in0_p
,
4622 && (0 != (rhs
= build_range_check (type
, common
,
4623 or_op
? ! in1_p
: in1_p
,
4625 return build2 (code
== TRUTH_ANDIF_EXPR
4626 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
4634 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4635 bit value. Arrange things so the extra bits will be set to zero if and
only if C is sign-extended to its full width.  If MASK is nonzero,
4637 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4640 unextend (tree c
, int p
, int unsignedp
, tree mask
)
4642 tree type
= TREE_TYPE (c
);
4643 int modesize
= GET_MODE_BITSIZE (TYPE_MODE (type
));
4646 if (p
== modesize
|| unsignedp
)
4649 /* We work by getting just the sign bit into the low-order bit, then
4650 into the high-order bit, then sign-extend. We then XOR that value
4652 temp
= const_binop (RSHIFT_EXPR
, c
, size_int (p
- 1), 0);
4653 temp
= const_binop (BIT_AND_EXPR
, temp
, size_int (1), 0);
4655 /* We must use a signed type in order to get an arithmetic right shift.
4656 However, we must also avoid introducing accidental overflows, so that
4657 a subsequent call to integer_zerop will work. Hence we must
4658 do the type conversion here. At this point, the constant is either
4659 zero or one, and the conversion to a signed type can never overflow.
4660 We could get an overflow if this conversion is done anywhere else. */
4661 if (TYPE_UNSIGNED (type
))
4662 temp
= fold_convert (lang_hooks
.types
.signed_type (type
), temp
);
4664 temp
= const_binop (LSHIFT_EXPR
, temp
, size_int (modesize
- 1), 0);
4665 temp
= const_binop (RSHIFT_EXPR
, temp
, size_int (modesize
- p
- 1), 0);
4667 temp
= const_binop (BIT_AND_EXPR
, temp
,
4668 fold_convert (TREE_TYPE (c
), mask
), 0);
4669 /* If necessary, convert the type back to match the type of C. */
4670 if (TYPE_UNSIGNED (type
))
4671 temp
= fold_convert (type
, temp
);
4673 return fold_convert (type
, const_binop (BIT_XOR_EXPR
, c
, temp
, 0));
4676 /* Find ways of folding logical expressions of LHS and RHS:
4677 Try to merge two comparisons to the same innermost item.
4678 Look for range tests like "ch >= '0' && ch <= '9'".
4679 Look for combinations of simple terms on machines with expensive branches
4680 and evaluate the RHS unconditionally.
4682 For example, if we have p->a == 2 && p->b == 4 and we can make an
4683 object large enough to span both A and B, we can do this with a comparison
4684 against the object ANDed with the a mask.
4686 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4687 operations to do this with one comparison.
We check for both normal comparisons and the BIT_AND_EXPRs made by this
function and the one above.
4692 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4693 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
two operands.
4698 We return the simplified tree or 0 if no optimization is possible. */
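/* For example (illustrative only): with adjacent bit-fields a and b,
   "p->a == 2 && p->b == 4" can be rewritten below as a single load of
   a field spanning both members, masked and compared against the one
   merged constant, instead of two separate loads and compares.  */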
4701 fold_truthop (enum tree_code code
, tree truth_type
, tree lhs
, tree rhs
)
4703 /* If this is the "or" of two comparisons, we can do something if
4704 the comparisons are NE_EXPR. If this is the "and", we can do something
4705 if the comparisons are EQ_EXPR. I.e.,
4706 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4708 WANTED_CODE is this operation code. For single bit fields, we can
4709 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4710 comparison for one-bit fields. */
enum tree_code wanted_code;
enum tree_code lcode, rcode;
tree ll_arg, lr_arg, rl_arg, rr_arg;
tree ll_inner, lr_inner, rl_inner, rr_inner;
HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
enum machine_mode lnmode, rnmode;
tree ll_mask, lr_mask, rl_mask, rr_mask;
tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
tree l_const, r_const;
tree lntype, rntype, result;
int first_bit, end_bit;
4730 /* Start by getting the comparison codes. Fail if anything is volatile.
4731 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4732 it were surrounded with a NE_EXPR. */
4734 if (TREE_SIDE_EFFECTS (lhs
) || TREE_SIDE_EFFECTS (rhs
))
4737 lcode
= TREE_CODE (lhs
);
4738 rcode
= TREE_CODE (rhs
);
4740 if (lcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (lhs
, 1)))
4742 lhs
= build2 (NE_EXPR
, truth_type
, lhs
,
4743 fold_convert (TREE_TYPE (lhs
), integer_zero_node
));
4747 if (rcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (rhs
, 1)))
4749 rhs
= build2 (NE_EXPR
, truth_type
, rhs
,
4750 fold_convert (TREE_TYPE (rhs
), integer_zero_node
));
4754 if (TREE_CODE_CLASS (lcode
) != tcc_comparison
4755 || TREE_CODE_CLASS (rcode
) != tcc_comparison
)
4758 ll_arg
= TREE_OPERAND (lhs
, 0);
4759 lr_arg
= TREE_OPERAND (lhs
, 1);
4760 rl_arg
= TREE_OPERAND (rhs
, 0);
4761 rr_arg
= TREE_OPERAND (rhs
, 1);
4763 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4764 if (simple_operand_p (ll_arg
)
4765 && simple_operand_p (lr_arg
))
4768 if (operand_equal_p (ll_arg
, rl_arg
, 0)
4769 && operand_equal_p (lr_arg
, rr_arg
, 0))
4771 result
= combine_comparisons (code
, lcode
, rcode
,
4772 truth_type
, ll_arg
, lr_arg
);
4776 else if (operand_equal_p (ll_arg
, rr_arg
, 0)
4777 && operand_equal_p (lr_arg
, rl_arg
, 0))
4779 result
= combine_comparisons (code
, lcode
,
4780 swap_tree_comparison (rcode
),
4781 truth_type
, ll_arg
, lr_arg
);
4787 code
= ((code
== TRUTH_AND_EXPR
|| code
== TRUTH_ANDIF_EXPR
)
4788 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
);
4790 /* If the RHS can be evaluated unconditionally and its operands are
4791 simple, it wins to evaluate the RHS unconditionally on machines
4792 with expensive branches. In this case, this isn't a comparison
4793 that can be merged. Avoid doing this if the RHS is a floating-point
4794 comparison since those can trap. */
4796 if (BRANCH_COST
>= 2
4797 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg
))
4798 && simple_operand_p (rl_arg
)
4799 && simple_operand_p (rr_arg
))
4801 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4802 if (code
== TRUTH_OR_EXPR
4803 && lcode
== NE_EXPR
&& integer_zerop (lr_arg
)
4804 && rcode
== NE_EXPR
&& integer_zerop (rr_arg
)
4805 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
))
4806 return build2 (NE_EXPR
, truth_type
,
4807 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
4809 fold_convert (TREE_TYPE (ll_arg
), integer_zero_node
));
4811 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4812 if (code
== TRUTH_AND_EXPR
4813 && lcode
== EQ_EXPR
&& integer_zerop (lr_arg
)
4814 && rcode
== EQ_EXPR
&& integer_zerop (rr_arg
)
4815 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
))
4816 return build2 (EQ_EXPR
, truth_type
,
4817 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
4819 fold_convert (TREE_TYPE (ll_arg
), integer_zero_node
));
4821 if (LOGICAL_OP_NON_SHORT_CIRCUIT
)
4822 return build2 (code
, truth_type
, lhs
, rhs
);
4825 /* See if the comparisons can be merged. Then get all the parameters for
4828 if ((lcode
!= EQ_EXPR
&& lcode
!= NE_EXPR
)
4829 || (rcode
!= EQ_EXPR
&& rcode
!= NE_EXPR
))
4833 ll_inner
= decode_field_reference (ll_arg
,
4834 &ll_bitsize
, &ll_bitpos
, &ll_mode
,
4835 &ll_unsignedp
, &volatilep
, &ll_mask
,
4837 lr_inner
= decode_field_reference (lr_arg
,
4838 &lr_bitsize
, &lr_bitpos
, &lr_mode
,
4839 &lr_unsignedp
, &volatilep
, &lr_mask
,
4841 rl_inner
= decode_field_reference (rl_arg
,
4842 &rl_bitsize
, &rl_bitpos
, &rl_mode
,
4843 &rl_unsignedp
, &volatilep
, &rl_mask
,
4845 rr_inner
= decode_field_reference (rr_arg
,
4846 &rr_bitsize
, &rr_bitpos
, &rr_mode
,
4847 &rr_unsignedp
, &volatilep
, &rr_mask
,
/* The inner operation on the lhs of each comparison must be the same
   if we are to be able to do anything.  Then see if we have constants.
   If not, the same must be true for the rhs.  */
if (volatilep || ll_inner == 0 || rl_inner == 0
    || ! operand_equal_p (ll_inner, rl_inner, 0))
  return 0;
if (TREE_CODE (lr_arg) == INTEGER_CST
    && TREE_CODE (rr_arg) == INTEGER_CST)
  l_const = lr_arg, r_const = rr_arg;
else if (lr_inner == 0 || rr_inner == 0
         || ! operand_equal_p (lr_inner, rr_inner, 0))
  return 0;
else
  l_const = r_const = 0;
4867 /* If either comparison code is not correct for our logical operation,
4868 fail. However, we can convert a one-bit comparison against zero into
4869 the opposite comparison against that bit being set in the field. */
4871 wanted_code
= (code
== TRUTH_AND_EXPR
? EQ_EXPR
: NE_EXPR
);
4872 if (lcode
!= wanted_code
)
4874 if (l_const
&& integer_zerop (l_const
) && integer_pow2p (ll_mask
))
4876 /* Make the left operand unsigned, since we are only interested
in the value of one bit.  Otherwise we are doing the wrong thing
below.  */
4886 /* This is analogous to the code for l_const above. */
4887 if (rcode
!= wanted_code
)
4889 if (r_const
&& integer_zerop (r_const
) && integer_pow2p (rl_mask
))
4898 /* After this point all optimizations will generate bit-field
4899 references, which we might not want. */
4900 if (! lang_hooks
.can_use_bit_fields_p ())
4903 /* See if we can find a mode that contains both fields being compared on
4904 the left. If we can't, fail. Otherwise, update all constants and masks
4905 to be relative to a field of that size. */
4906 first_bit
= MIN (ll_bitpos
, rl_bitpos
);
4907 end_bit
= MAX (ll_bitpos
+ ll_bitsize
, rl_bitpos
+ rl_bitsize
);
4908 lnmode
= get_best_mode (end_bit
- first_bit
, first_bit
,
4909 TYPE_ALIGN (TREE_TYPE (ll_inner
)), word_mode
,
4911 if (lnmode
== VOIDmode
)
4914 lnbitsize
= GET_MODE_BITSIZE (lnmode
);
4915 lnbitpos
= first_bit
& ~ (lnbitsize
- 1);
4916 lntype
= lang_hooks
.types
.type_for_size (lnbitsize
, 1);
4917 xll_bitpos
= ll_bitpos
- lnbitpos
, xrl_bitpos
= rl_bitpos
- lnbitpos
;
4919 if (BYTES_BIG_ENDIAN
)
4921 xll_bitpos
= lnbitsize
- xll_bitpos
- ll_bitsize
;
4922 xrl_bitpos
= lnbitsize
- xrl_bitpos
- rl_bitsize
;
4925 ll_mask
= const_binop (LSHIFT_EXPR
, fold_convert (lntype
, ll_mask
),
4926 size_int (xll_bitpos
), 0);
4927 rl_mask
= const_binop (LSHIFT_EXPR
, fold_convert (lntype
, rl_mask
),
4928 size_int (xrl_bitpos
), 0);
4932 l_const
= fold_convert (lntype
, l_const
);
4933 l_const
= unextend (l_const
, ll_bitsize
, ll_unsignedp
, ll_and_mask
);
4934 l_const
= const_binop (LSHIFT_EXPR
, l_const
, size_int (xll_bitpos
), 0);
4935 if (! integer_zerop (const_binop (BIT_AND_EXPR
, l_const
,
4936 fold_build1 (BIT_NOT_EXPR
,
4940 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
4942 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
4947 r_const
= fold_convert (lntype
, r_const
);
4948 r_const
= unextend (r_const
, rl_bitsize
, rl_unsignedp
, rl_and_mask
);
4949 r_const
= const_binop (LSHIFT_EXPR
, r_const
, size_int (xrl_bitpos
), 0);
4950 if (! integer_zerop (const_binop (BIT_AND_EXPR
, r_const
,
4951 fold_build1 (BIT_NOT_EXPR
,
4955 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
4957 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
/* If the right sides are not constant, do the same for them.  Also,
4962 disallow this optimization if a size or signedness mismatch occurs
4963 between the left and right sides. */
4966 if (ll_bitsize
!= lr_bitsize
|| rl_bitsize
!= rr_bitsize
4967 || ll_unsignedp
!= lr_unsignedp
|| rl_unsignedp
!= rr_unsignedp
4968 /* Make sure the two fields on the right
4969 correspond to the left without being swapped. */
4970 || ll_bitpos
- rl_bitpos
!= lr_bitpos
- rr_bitpos
)
4973 first_bit
= MIN (lr_bitpos
, rr_bitpos
);
4974 end_bit
= MAX (lr_bitpos
+ lr_bitsize
, rr_bitpos
+ rr_bitsize
);
4975 rnmode
= get_best_mode (end_bit
- first_bit
, first_bit
,
4976 TYPE_ALIGN (TREE_TYPE (lr_inner
)), word_mode
,
4978 if (rnmode
== VOIDmode
)
4981 rnbitsize
= GET_MODE_BITSIZE (rnmode
);
4982 rnbitpos
= first_bit
& ~ (rnbitsize
- 1);
4983 rntype
= lang_hooks
.types
.type_for_size (rnbitsize
, 1);
4984 xlr_bitpos
= lr_bitpos
- rnbitpos
, xrr_bitpos
= rr_bitpos
- rnbitpos
;
4986 if (BYTES_BIG_ENDIAN
)
4988 xlr_bitpos
= rnbitsize
- xlr_bitpos
- lr_bitsize
;
4989 xrr_bitpos
= rnbitsize
- xrr_bitpos
- rr_bitsize
;
4992 lr_mask
= const_binop (LSHIFT_EXPR
, fold_convert (rntype
, lr_mask
),
4993 size_int (xlr_bitpos
), 0);
4994 rr_mask
= const_binop (LSHIFT_EXPR
, fold_convert (rntype
, rr_mask
),
4995 size_int (xrr_bitpos
), 0);
4997 /* Make a mask that corresponds to both fields being compared.
4998 Do this for both items being compared. If the operands are the
4999 same size and the bits being compared are in the same position
5000 then we can do this by masking both and comparing the masked
5002 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
, 0);
5003 lr_mask
= const_binop (BIT_IOR_EXPR
, lr_mask
, rr_mask
, 0);
5004 if (lnbitsize
== rnbitsize
&& xll_bitpos
== xlr_bitpos
)
5006 lhs
= make_bit_field_ref (ll_inner
, lntype
, lnbitsize
, lnbitpos
,
5007 ll_unsignedp
|| rl_unsignedp
);
5008 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
5009 lhs
= build2 (BIT_AND_EXPR
, lntype
, lhs
, ll_mask
);
5011 rhs
= make_bit_field_ref (lr_inner
, rntype
, rnbitsize
, rnbitpos
,
5012 lr_unsignedp
|| rr_unsignedp
);
5013 if (! all_ones_mask_p (lr_mask
, rnbitsize
))
5014 rhs
= build2 (BIT_AND_EXPR
, rntype
, rhs
, lr_mask
);
5016 return build2 (wanted_code
, truth_type
, lhs
, rhs
);
5019 /* There is still another way we can do something: If both pairs of
5020 fields being compared are adjacent, we may be able to make a wider
5021 field containing them both.
5023 Note that we still must mask the lhs/rhs expressions. Furthermore,
5024 the mask must be shifted to account for the shift done by
5025 make_bit_field_ref. */
5026 if ((ll_bitsize
+ ll_bitpos
== rl_bitpos
5027 && lr_bitsize
+ lr_bitpos
== rr_bitpos
)
5028 || (ll_bitpos
== rl_bitpos
+ rl_bitsize
5029 && lr_bitpos
== rr_bitpos
+ rr_bitsize
))
5033 lhs
= make_bit_field_ref (ll_inner
, lntype
, ll_bitsize
+ rl_bitsize
,
5034 MIN (ll_bitpos
, rl_bitpos
), ll_unsignedp
);
5035 rhs
= make_bit_field_ref (lr_inner
, rntype
, lr_bitsize
+ rr_bitsize
,
5036 MIN (lr_bitpos
, rr_bitpos
), lr_unsignedp
);
5038 ll_mask
= const_binop (RSHIFT_EXPR
, ll_mask
,
5039 size_int (MIN (xll_bitpos
, xrl_bitpos
)), 0);
5040 lr_mask
= const_binop (RSHIFT_EXPR
, lr_mask
,
5041 size_int (MIN (xlr_bitpos
, xrr_bitpos
)), 0);
5043 /* Convert to the smaller type before masking out unwanted bits. */
5045 if (lntype
!= rntype
)
5047 if (lnbitsize
> rnbitsize
)
5049 lhs
= fold_convert (rntype
, lhs
);
5050 ll_mask
= fold_convert (rntype
, ll_mask
);
5053 else if (lnbitsize
< rnbitsize
)
5055 rhs
= fold_convert (lntype
, rhs
);
5056 lr_mask
= fold_convert (lntype
, lr_mask
);
5061 if (! all_ones_mask_p (ll_mask
, ll_bitsize
+ rl_bitsize
))
5062 lhs
= build2 (BIT_AND_EXPR
, type
, lhs
, ll_mask
);
5064 if (! all_ones_mask_p (lr_mask
, lr_bitsize
+ rr_bitsize
))
5065 rhs
= build2 (BIT_AND_EXPR
, type
, rhs
, lr_mask
);
5067 return build2 (wanted_code
, truth_type
, lhs
, rhs
);
5073 /* Handle the case of comparisons with constants. If there is something in
5074 common between the masks, those bits of the constants must be the same.
5075 If not, the condition is always false. Test for this to avoid generating
5076 incorrect code below. */
5077 result
= const_binop (BIT_AND_EXPR
, ll_mask
, rl_mask
, 0);
5078 if (! integer_zerop (result
)
5079 && simple_cst_equal (const_binop (BIT_AND_EXPR
, result
, l_const
, 0),
5080 const_binop (BIT_AND_EXPR
, result
, r_const
, 0)) != 1)
5082 if (wanted_code
== NE_EXPR
)
5084 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5085 return constant_boolean_node (true, truth_type
);
5089 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5090 return constant_boolean_node (false, truth_type
);
5094 /* Construct the expression we will return. First get the component
5095 reference we will make. Unless the mask is all ones the width of
5096 that field, perform the mask operation. Then compare with the
5098 result
= make_bit_field_ref (ll_inner
, lntype
, lnbitsize
, lnbitpos
,
5099 ll_unsignedp
|| rl_unsignedp
);
5101 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
, 0);
5102 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
5103 result
= build2 (BIT_AND_EXPR
, lntype
, result
, ll_mask
);
5105 return build2 (wanted_code
, truth_type
, result
,
5106 const_binop (BIT_IOR_EXPR
, l_const
, r_const
, 0));
5109 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5113 optimize_minmax_comparison (enum tree_code code
, tree type
, tree op0
, tree op1
)
5116 enum tree_code op_code
;
tree comp_const = op1;
int consts_equal, consts_lt;
5122 STRIP_SIGN_NOPS (arg0
);
5124 op_code
= TREE_CODE (arg0
);
5125 minmax_const
= TREE_OPERAND (arg0
, 1);
5126 consts_equal
= tree_int_cst_equal (minmax_const
, comp_const
);
5127 consts_lt
= tree_int_cst_lt (minmax_const
, comp_const
);
5128 inner
= TREE_OPERAND (arg0
, 0);
5130 /* If something does not permit us to optimize, return the original tree. */
5131 if ((op_code
!= MIN_EXPR
&& op_code
!= MAX_EXPR
)
5132 || TREE_CODE (comp_const
) != INTEGER_CST
5133 || TREE_CONSTANT_OVERFLOW (comp_const
)
5134 || TREE_CODE (minmax_const
) != INTEGER_CST
5135 || TREE_CONSTANT_OVERFLOW (minmax_const
))
5138 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5139 and GT_EXPR, doing the rest with recursive calls using logical
5143 case NE_EXPR
: case LT_EXPR
: case LE_EXPR
:
5145 /* FIXME: We should be able to invert code without building a
5146 scratch tree node, but doing so would require us to
5147 duplicate a part of invert_truthvalue here. */
5148 tree tem
= invert_truthvalue (build2 (code
, type
, op0
, op1
));
5149 tem
= optimize_minmax_comparison (TREE_CODE (tem
),
5151 TREE_OPERAND (tem
, 0),
5152 TREE_OPERAND (tem
, 1));
5153 return invert_truthvalue (tem
);
5158 fold_build2 (TRUTH_ORIF_EXPR
, type
,
5159 optimize_minmax_comparison
5160 (EQ_EXPR
, type
, arg0
, comp_const
),
5161 optimize_minmax_comparison
5162 (GT_EXPR
, type
, arg0
, comp_const
));
5165 if (op_code
== MAX_EXPR
&& consts_equal
)
5166 /* MAX (X, 0) == 0 -> X <= 0 */
5167 return fold_build2 (LE_EXPR
, type
, inner
, comp_const
);
5169 else if (op_code
== MAX_EXPR
&& consts_lt
)
5170 /* MAX (X, 0) == 5 -> X == 5 */
5171 return fold_build2 (EQ_EXPR
, type
, inner
, comp_const
);
5173 else if (op_code
== MAX_EXPR
)
5174 /* MAX (X, 0) == -1 -> false */
5175 return omit_one_operand (type
, integer_zero_node
, inner
);
5177 else if (consts_equal
)
5178 /* MIN (X, 0) == 0 -> X >= 0 */
5179 return fold_build2 (GE_EXPR
, type
, inner
, comp_const
);
5182 /* MIN (X, 0) == 5 -> false */
5183 return omit_one_operand (type
, integer_zero_node
, inner
);
5186 /* MIN (X, 0) == -1 -> X == -1 */
5187 return fold_build2 (EQ_EXPR
, type
, inner
, comp_const
);
5190 if (op_code
== MAX_EXPR
&& (consts_equal
|| consts_lt
))
5191 /* MAX (X, 0) > 0 -> X > 0
5192 MAX (X, 0) > 5 -> X > 5 */
5193 return fold_build2 (GT_EXPR
, type
, inner
, comp_const
);
5195 else if (op_code
== MAX_EXPR
)
5196 /* MAX (X, 0) > -1 -> true */
5197 return omit_one_operand (type
, integer_one_node
, inner
);
5199 else if (op_code
== MIN_EXPR
&& (consts_equal
|| consts_lt
))
5200 /* MIN (X, 0) > 0 -> false
5201 MIN (X, 0) > 5 -> false */
5202 return omit_one_operand (type
, integer_zero_node
, inner
);
5205 /* MIN (X, 0) > -1 -> X > -1 */
5206 return fold_build2 (GT_EXPR
, type
, inner
, comp_const
);
5213 /* T is an integer expression that is being multiplied, divided, or taken a
5214 modulus (CODE says which and what kind of divide or modulus) by a
5215 constant C. See if we can eliminate that operation by folding it with
5216 other operations already in T. WIDE_TYPE, if non-null, is a type that
5217 should be used for the computation if wider than our type.
5219 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5220 (X * 2) + (Y * 4). We must, however, be assured that either the original
5221 expression would not overflow or that overflow is undefined for the type
5222 in the language in question.
5224 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5225 the machine has a multiply-accumulate insn or that this is part of an
5226 addressing calculation.
5228 If we return a non-null expression, it is an equivalent form of the
5229 original computation, but need not be in the original type. */
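/* To make the comment above concrete (illustration only): for
   ((X * 8) + (Y * 16)) / 4 the recursion below distributes the
   division and yields (X * 2) + (Y * 4), overflow rules permitting,
   and (X + 7) * 4 is canonicalized to X * 4 + 28.  */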
5232 extract_muldiv (tree t
, tree c
, enum tree_code code
, tree wide_type
)
5234 /* To avoid exponential search depth, refuse to allow recursion past
5235 three levels. Beyond that (1) it's highly unlikely that we'll find
5236 something interesting and (2) we've probably processed it before
5237 when we built the inner expression. */
5246 ret
= extract_muldiv_1 (t
, c
, code
, wide_type
);
5253 extract_muldiv_1 (tree t
, tree c
, enum tree_code code
, tree wide_type
)
5255 tree type
= TREE_TYPE (t
);
5256 enum tree_code tcode
= TREE_CODE (t
);
5257 tree ctype
= (wide_type
!= 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type
))
5258 > GET_MODE_SIZE (TYPE_MODE (type
)))
5259 ? wide_type
: type
);
int same_p = tcode == code;
tree op0 = NULL_TREE, op1 = NULL_TREE;
5264 /* Don't deal with constants of zero here; they confuse the code below. */
5265 if (integer_zerop (c
))
5268 if (TREE_CODE_CLASS (tcode
) == tcc_unary
)
5269 op0
= TREE_OPERAND (t
, 0);
5271 if (TREE_CODE_CLASS (tcode
) == tcc_binary
)
5272 op0
= TREE_OPERAND (t
, 0), op1
= TREE_OPERAND (t
, 1);
5274 /* Note that we need not handle conditional operations here since fold
5275 already handles those cases. So just do arithmetic here. */
5279 /* For a constant, we can always simplify if we are a multiply
5280 or (for divide and modulus) if it is a multiple of our constant. */
5281 if (code
== MULT_EXPR
5282 || integer_zerop (const_binop (TRUNC_MOD_EXPR
, t
, c
, 0)))
5283 return const_binop (code
, fold_convert (ctype
, t
),
5284 fold_convert (ctype
, c
), 0);
5287 case CONVERT_EXPR
: case NON_LVALUE_EXPR
: case NOP_EXPR
:
5288 /* If op0 is an expression ... */
5289 if ((COMPARISON_CLASS_P (op0
)
5290 || UNARY_CLASS_P (op0
)
5291 || BINARY_CLASS_P (op0
)
5292 || EXPRESSION_CLASS_P (op0
))
5293 /* ... and is unsigned, and its type is smaller than ctype,
5294 then we cannot pass through as widening. */
5295 && ((TYPE_UNSIGNED (TREE_TYPE (op0
))
5296 && ! (TREE_CODE (TREE_TYPE (op0
)) == INTEGER_TYPE
5297 && TYPE_IS_SIZETYPE (TREE_TYPE (op0
)))
5298 && (GET_MODE_SIZE (TYPE_MODE (ctype
))
5299 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0
)))))
5300 /* ... or this is a truncation (t is narrower than op0),
5301 then we cannot pass through this narrowing. */
5302 || (GET_MODE_SIZE (TYPE_MODE (type
))
5303 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0
))))
5304 /* ... or signedness changes for division or modulus,
5305 then we cannot pass through this conversion. */
5306 || (code
!= MULT_EXPR
5307 && (TYPE_UNSIGNED (ctype
)
5308 != TYPE_UNSIGNED (TREE_TYPE (op0
))))))
5311 /* Pass the constant down and see if we can make a simplification. If
5312 we can, replace this expression with the inner simplification for
5313 possible later conversion to our or some other type. */
5314 if ((t2
= fold_convert (TREE_TYPE (op0
), c
)) != 0
5315 && TREE_CODE (t2
) == INTEGER_CST
5316 && ! TREE_CONSTANT_OVERFLOW (t2
)
5317 && (0 != (t1
= extract_muldiv (op0
, t2
, code
,
5319 ? ctype
: NULL_TREE
))))
5324 /* If widening the type changes it from signed to unsigned, then we
5325 must avoid building ABS_EXPR itself as unsigned. */
5326 if (TYPE_UNSIGNED (ctype
) && !TYPE_UNSIGNED (type
))
5328 tree cstype
= (*lang_hooks
.types
.signed_type
) (ctype
);
5329 if ((t1
= extract_muldiv (op0
, c
, code
, cstype
)) != 0)
5331 t1
= fold_build1 (tcode
, cstype
, fold_convert (cstype
, t1
));
5332 return fold_convert (ctype
, t1
);
5338 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
)) != 0)
5339 return fold_build1 (tcode
, ctype
, fold_convert (ctype
, t1
));
5342 case MIN_EXPR
: case MAX_EXPR
:
5343 /* If widening the type changes the signedness, then we can't perform
5344 this optimization as that changes the result. */
5345 if (TYPE_UNSIGNED (ctype
) != TYPE_UNSIGNED (type
))
5348 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5349 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
)) != 0
5350 && (t2
= extract_muldiv (op1
, c
, code
, wide_type
)) != 0)
5352 if (tree_int_cst_sgn (c
) < 0)
5353 tcode
= (tcode
== MIN_EXPR
? MAX_EXPR
: MIN_EXPR
);
5355 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
5356 fold_convert (ctype
, t2
));
5360 case LSHIFT_EXPR
: case RSHIFT_EXPR
:
5361 /* If the second operand is constant, this is a multiplication
5362 or floor division, by a power of two, so we can treat it that
5363 way unless the multiplier or divisor overflows. Signed
5364 left-shift overflow is implementation-defined rather than
5365 undefined in C90, so do not convert signed left shift into
5367 if (TREE_CODE (op1
) == INTEGER_CST
5368 && (tcode
== RSHIFT_EXPR
|| TYPE_UNSIGNED (TREE_TYPE (op0
)))
5369 /* const_binop may not detect overflow correctly,
5370 so check for it explicitly here. */
5371 && TYPE_PRECISION (TREE_TYPE (size_one_node
)) > TREE_INT_CST_LOW (op1
)
5372 && TREE_INT_CST_HIGH (op1
) == 0
5373 && 0 != (t1
= fold_convert (ctype
,
5374 const_binop (LSHIFT_EXPR
,
5377 && ! TREE_OVERFLOW (t1
))
5378 return extract_muldiv (build2 (tcode
== LSHIFT_EXPR
5379 ? MULT_EXPR
: FLOOR_DIV_EXPR
,
5380 ctype
, fold_convert (ctype
, op0
), t1
),
5381 c
, code
, wide_type
);
5384 case PLUS_EXPR
: case MINUS_EXPR
:
5385 /* See if we can eliminate the operation on both sides. If we can, we
5386 can return a new PLUS or MINUS. If we can't, the only remaining
5387 cases where we can do anything are if the second operand is a
5389 t1
= extract_muldiv (op0
, c
, code
, wide_type
);
5390 t2
= extract_muldiv (op1
, c
, code
, wide_type
);
5391 if (t1
!= 0 && t2
!= 0
5392 && (code
== MULT_EXPR
5393 /* If not multiplication, we can only do this if both operands
5394 are divisible by c. */
5395 || (multiple_of_p (ctype
, op0
, c
)
5396 && multiple_of_p (ctype
, op1
, c
))))
5397 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
5398 fold_convert (ctype
, t2
));
5400 /* If this was a subtraction, negate OP1 and set it to be an addition.
5401 This simplifies the logic below. */
5402 if (tcode
== MINUS_EXPR
)
5403 tcode
= PLUS_EXPR
, op1
= negate_expr (op1
);
5405 if (TREE_CODE (op1
) != INTEGER_CST
)
/* If either OP1 or C is negative, this optimization is not safe for
5409 some of the division and remainder types while for others we need
5410 to change the code. */
5411 if (tree_int_cst_sgn (op1
) < 0 || tree_int_cst_sgn (c
) < 0)
5413 if (code
== CEIL_DIV_EXPR
)
5414 code
= FLOOR_DIV_EXPR
;
5415 else if (code
== FLOOR_DIV_EXPR
)
5416 code
= CEIL_DIV_EXPR
;
5417 else if (code
!= MULT_EXPR
5418 && code
!= CEIL_MOD_EXPR
&& code
!= FLOOR_MOD_EXPR
)
5422 /* If it's a multiply or a division/modulus operation of a multiple
5423 of our constant, do the operation and verify it doesn't overflow. */
5424 if (code
== MULT_EXPR
5425 || integer_zerop (const_binop (TRUNC_MOD_EXPR
, op1
, c
, 0)))
5427 op1
= const_binop (code
, fold_convert (ctype
, op1
),
5428 fold_convert (ctype
, c
), 0);
5429 /* We allow the constant to overflow with wrapping semantics. */
5431 || (TREE_OVERFLOW (op1
) && ! flag_wrapv
))
/* If we have an unsigned type that is not a sizetype, we cannot widen
5438 the operation since it will change the result if the original
5439 computation overflowed. */
5440 if (TYPE_UNSIGNED (ctype
)
5441 && ! (TREE_CODE (ctype
) == INTEGER_TYPE
&& TYPE_IS_SIZETYPE (ctype
))
5445 /* If we were able to eliminate our operation from the first side,
5446 apply our operation to the second side and reform the PLUS. */
5447 if (t1
!= 0 && (TREE_CODE (t1
) != code
|| code
== MULT_EXPR
))
5448 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
), op1
);
5450 /* The last case is if we are a multiply. In that case, we can
5451 apply the distributive law to commute the multiply and addition
5452 if the multiplication of the constants doesn't overflow. */
5453 if (code
== MULT_EXPR
)
5454 return fold_build2 (tcode
, ctype
,
5455 fold_build2 (code
, ctype
,
5456 fold_convert (ctype
, op0
),
5457 fold_convert (ctype
, c
)),
5463 /* We have a special case here if we are doing something like
5464 (C * 8) % 4 since we know that's zero. */
5465 if ((code
== TRUNC_MOD_EXPR
|| code
== CEIL_MOD_EXPR
5466 || code
== FLOOR_MOD_EXPR
|| code
== ROUND_MOD_EXPR
)
5467 && TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
5468 && integer_zerop (const_binop (TRUNC_MOD_EXPR
, op1
, c
, 0)))
5469 return omit_one_operand (type
, integer_zero_node
, op0
);
5471 /* ... fall through ... */
5473 case TRUNC_DIV_EXPR
: case CEIL_DIV_EXPR
: case FLOOR_DIV_EXPR
:
5474 case ROUND_DIV_EXPR
: case EXACT_DIV_EXPR
:
5475 /* If we can extract our operation from the LHS, do so and return a
5476 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5477 do something only if the second operand is a constant. */
5479 && (t1
= extract_muldiv (op0
, c
, code
, wide_type
)) != 0)
5480 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
5481 fold_convert (ctype
, op1
));
5482 else if (tcode
== MULT_EXPR
&& code
== MULT_EXPR
5483 && (t1
= extract_muldiv (op1
, c
, code
, wide_type
)) != 0)
5484 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
5485 fold_convert (ctype
, t1
));
5486 else if (TREE_CODE (op1
) != INTEGER_CST
)
5489 /* If these are the same operation types, we can associate them
5490 assuming no overflow. */
5492 && 0 != (t1
= const_binop (MULT_EXPR
, fold_convert (ctype
, op1
),
5493 fold_convert (ctype
, c
), 0))
5494 && ! TREE_OVERFLOW (t1
))
5495 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
), t1
);
5497 /* If these operations "cancel" each other, we have the main
5498 optimizations of this pass, which occur when either constant is a
5499 multiple of the other, in which case we replace this with either an
operation of CODE or of TCODE.
5502 If we have an unsigned type that is not a sizetype, we cannot do
5503 this since it will change the result if the original computation
5505 if ((! TYPE_UNSIGNED (ctype
)
5506 || (TREE_CODE (ctype
) == INTEGER_TYPE
&& TYPE_IS_SIZETYPE (ctype
)))
5508 && ((code
== MULT_EXPR
&& tcode
== EXACT_DIV_EXPR
)
5509 || (tcode
== MULT_EXPR
5510 && code
!= TRUNC_MOD_EXPR
&& code
!= CEIL_MOD_EXPR
5511 && code
!= FLOOR_MOD_EXPR
&& code
!= ROUND_MOD_EXPR
)))
5513 if (integer_zerop (const_binop (TRUNC_MOD_EXPR
, op1
, c
, 0)))
5514 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
5515 fold_convert (ctype
,
5516 const_binop (TRUNC_DIV_EXPR
,
5518 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR
, c
, op1
, 0)))
5519 return fold_build2 (code
, ctype
, fold_convert (ctype
, op0
),
5520 fold_convert (ctype
,
5521 const_binop (TRUNC_DIV_EXPR
,
5533 /* Return a node which has the indicated constant VALUE (either 0 or
5534 1), and is of the indicated TYPE. */
5537 constant_boolean_node (int value
, tree type
)
5539 if (type
== integer_type_node
)
5540 return value
? integer_one_node
: integer_zero_node
;
5541 else if (type
== boolean_type_node
)
5542 return value
? boolean_true_node
: boolean_false_node
;
5544 return build_int_cst (type
, value
);
5548 /* Return true if expr looks like an ARRAY_REF and set base and
5549 offset to the appropriate trees. If there is no offset,
5550 offset is set to NULL_TREE. Base will be canonicalized to
5551 something you can get the element type from using
5552 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5553 in bytes to the base. */
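/* For instance (illustrative only): for &a[i] the ARRAY_REF case below
   sets *base to the array "a" and *offset to roughly i * sizeof (a[0])
   via array_ref_element_size; for a plain pointer variable p, *base
   becomes p and *offset NULL_TREE.  */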
5556 extract_array_ref (tree expr
, tree
*base
, tree
*offset
)
5558 /* One canonical form is a PLUS_EXPR with the first
5559 argument being an ADDR_EXPR with a possible NOP_EXPR
5561 if (TREE_CODE (expr
) == PLUS_EXPR
)
5563 tree op0
= TREE_OPERAND (expr
, 0);
5564 tree inner_base
, dummy1
;
/* Strip NOP_EXPRs here because the C frontends and/or
   folders may present us with (int *)&x.a + 4B.  */
5568 if (extract_array_ref (op0
, &inner_base
, &dummy1
))
5571 if (dummy1
== NULL_TREE
)
5572 *offset
= TREE_OPERAND (expr
, 1);
5574 *offset
= fold_build2 (PLUS_EXPR
, TREE_TYPE (expr
),
5575 dummy1
, TREE_OPERAND (expr
, 1));
/* The other canonical form is an ADDR_EXPR of an ARRAY_REF,
5580 which we transform into an ADDR_EXPR with appropriate
5581 offset. For other arguments to the ADDR_EXPR we assume
5582 zero offset and as such do not care about the ADDR_EXPR
5583 type and strip possible nops from it. */
5584 else if (TREE_CODE (expr
) == ADDR_EXPR
)
5586 tree op0
= TREE_OPERAND (expr
, 0);
5587 if (TREE_CODE (op0
) == ARRAY_REF
)
5589 tree idx
= TREE_OPERAND (op0
, 1);
5590 *base
= TREE_OPERAND (op0
, 0);
5591 *offset
= fold_build2 (MULT_EXPR
, TREE_TYPE (idx
), idx
,
5592 array_ref_element_size (op0
));
5596 /* Handle array-to-pointer decay as &a. */
5597 if (TREE_CODE (TREE_TYPE (op0
)) == ARRAY_TYPE
)
5598 *base
= TREE_OPERAND (expr
, 0);
5601 *offset
= NULL_TREE
;
5605 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5606 else if (SSA_VAR_P (expr
)
5607 && TREE_CODE (TREE_TYPE (expr
)) == POINTER_TYPE
)
5610 *offset
= NULL_TREE
;
5618 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
5620 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5621 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5622 COND is the first argument to CODE; otherwise (as in the example
5623 given here), it is the second argument. TYPE is the type of the
original expression.  Return NULL_TREE if no simplification is
possible.  */
5628 fold_binary_op_with_conditional_arg (enum tree_code code
,
5629 tree type
, tree op0
, tree op1
,
5630 tree cond
, tree arg
, int cond_first_p
)
5632 tree cond_type
= cond_first_p
? TREE_TYPE (op0
) : TREE_TYPE (op1
);
5633 tree arg_type
= cond_first_p
? TREE_TYPE (op1
) : TREE_TYPE (op0
);
5634 tree test
, true_value
, false_value
;
5635 tree lhs
= NULL_TREE
;
5636 tree rhs
= NULL_TREE
;
5638 /* This transformation is only worthwhile if we don't have to wrap
5639 arg in a SAVE_EXPR, and the operation can be simplified on at least
one of the branches once it is pushed inside the COND_EXPR.  */
5641 if (!TREE_CONSTANT (arg
))
5644 if (TREE_CODE (cond
) == COND_EXPR
)
5646 test
= TREE_OPERAND (cond
, 0);
5647 true_value
= TREE_OPERAND (cond
, 1);
5648 false_value
= TREE_OPERAND (cond
, 2);
/* If this operand is a throw expression (and so has void type), it
   does not make sense to try to perform a logical or arithmetic
   operation involving it.  */
5652 if (VOID_TYPE_P (TREE_TYPE (true_value
)))
5654 if (VOID_TYPE_P (TREE_TYPE (false_value
)))
5659 tree testtype
= TREE_TYPE (cond
);
5661 true_value
= constant_boolean_node (true, testtype
);
5662 false_value
= constant_boolean_node (false, testtype
);
5665 arg
= fold_convert (arg_type
, arg
);
5668 true_value
= fold_convert (cond_type
, true_value
);
5670 lhs
= fold_build2 (code
, type
, true_value
, arg
);
5672 lhs
= fold_build2 (code
, type
, arg
, true_value
);
5676 false_value
= fold_convert (cond_type
, false_value
);
5678 rhs
= fold_build2 (code
, type
, false_value
, arg
);
5680 rhs
= fold_build2 (code
, type
, arg
, false_value
);
5683 test
= fold_build3 (COND_EXPR
, type
, test
, lhs
, rhs
);
5684 return fold_convert (type
, test
);
5688 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5690 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5691 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5692 ADDEND is the same as X.
5694 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5695 and finite. The problematic cases are when X is zero, and its mode
5696 has signed zeros. In the case of rounding towards -infinity,
5697 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5698 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5701 fold_real_zero_addition_p (tree type
, tree addend
, int negate
)
5703 if (!real_zerop (addend
))
5706 /* Don't allow the fold with -fsignaling-nans. */
5707 if (HONOR_SNANS (TYPE_MODE (type
)))
5710 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5711 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type
)))
5714 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5715 if (TREE_CODE (addend
) == REAL_CST
5716 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend
)))
5719 /* The mode has signed zeros, and we have to honor their sign.
5720 In this situation, there is only one case we can return true for.
5721 X - 0 is the same as X unless rounding towards -infinity is
5723 return negate
&& !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
));
5726 /* Subroutine of fold() that checks comparisons of built-in math
5727 functions against real constants.
5729 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5730 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5731 is the type of the result and ARG0 and ARG1 are the operands of the
5732 comparison. ARG1 must be a TREE_REAL_CST.
5734 The function returns the constant folded tree if a simplification
5735 can be made, and NULL_TREE otherwise. */
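/* As a concrete instance (the constant is only an example):
   "sqrt (x) > 2.0" is rewritten below as "x > 4.0"; if squaring the
   constant overflows to +Inf, the comparison instead collapses to an
   infinity test or to a constant, as handled case by case below.  */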
5738 fold_mathfn_compare (enum built_in_function fcode
, enum tree_code code
,
5739 tree type
, tree arg0
, tree arg1
)
5743 if (BUILTIN_SQRT_P (fcode
))
5745 tree arg
= TREE_VALUE (TREE_OPERAND (arg0
, 1));
5746 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (arg0
));
5748 c
= TREE_REAL_CST (arg1
);
5749 if (REAL_VALUE_NEGATIVE (c
))
5751 /* sqrt(x) < y is always false, if y is negative. */
5752 if (code
== EQ_EXPR
|| code
== LT_EXPR
|| code
== LE_EXPR
)
5753 return omit_one_operand (type
, integer_zero_node
, arg
);
5755 /* sqrt(x) > y is always true, if y is negative and we
5756 don't care about NaNs, i.e. negative values of x. */
5757 if (code
== NE_EXPR
|| !HONOR_NANS (mode
))
5758 return omit_one_operand (type
, integer_one_node
, arg
);
5760 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5761 return fold_build2 (GE_EXPR
, type
, arg
,
5762 build_real (TREE_TYPE (arg
), dconst0
));
5764 else if (code
== GT_EXPR
|| code
== GE_EXPR
)
5768 REAL_ARITHMETIC (c2
, MULT_EXPR
, c
, c
);
5769 real_convert (&c2
, mode
, &c2
);
5771 if (REAL_VALUE_ISINF (c2
))
5773 /* sqrt(x) > y is x == +Inf, when y is very large. */
5774 if (HONOR_INFINITIES (mode
))
5775 return fold_build2 (EQ_EXPR
, type
, arg
,
5776 build_real (TREE_TYPE (arg
), c2
));
5778 /* sqrt(x) > y is always false, when y is very large
5779 and we don't care about infinities. */
5780 return omit_one_operand (type
, integer_zero_node
, arg
);
5783 /* sqrt(x) > c is the same as x > c*c. */
5784 return fold_build2 (code
, type
, arg
,
5785 build_real (TREE_TYPE (arg
), c2
));
5787 else if (code
== LT_EXPR
|| code
== LE_EXPR
)
5791 REAL_ARITHMETIC (c2
, MULT_EXPR
, c
, c
);
5792 real_convert (&c2
, mode
, &c2
);
5794 if (REAL_VALUE_ISINF (c2
))
5796 /* sqrt(x) < y is always true, when y is a very large
5797 value and we don't care about NaNs or Infinities. */
5798 if (! HONOR_NANS (mode
) && ! HONOR_INFINITIES (mode
))
5799 return omit_one_operand (type
, integer_one_node
, arg
);
5801 /* sqrt(x) < y is x != +Inf when y is very large and we
5802 don't care about NaNs. */
5803 if (! HONOR_NANS (mode
))
5804 return fold_build2 (NE_EXPR
, type
, arg
,
5805 build_real (TREE_TYPE (arg
), c2
));
5807 /* sqrt(x) < y is x >= 0 when y is very large and we
5808 don't care about Infinities. */
5809 if (! HONOR_INFINITIES (mode
))
5810 return fold_build2 (GE_EXPR
, type
, arg
,
5811 build_real (TREE_TYPE (arg
), dconst0
));
5813 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5814 if (lang_hooks
.decls
.global_bindings_p () != 0
5815 || CONTAINS_PLACEHOLDER_P (arg
))
5818 arg
= save_expr (arg
);
5819 return fold_build2 (TRUTH_ANDIF_EXPR
, type
,
5820 fold_build2 (GE_EXPR
, type
, arg
,
5821 build_real (TREE_TYPE (arg
),
5823 fold_build2 (NE_EXPR
, type
, arg
,
5824 build_real (TREE_TYPE (arg
),
5828 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5829 if (! HONOR_NANS (mode
))
5830 return fold_build2 (code
, type
, arg
,
5831 build_real (TREE_TYPE (arg
), c2
));
5833 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5834 if (lang_hooks
.decls
.global_bindings_p () == 0
5835 && ! CONTAINS_PLACEHOLDER_P (arg
))
5837 arg
= save_expr (arg
);
5838 return fold_build2 (TRUTH_ANDIF_EXPR
, type
,
5839 fold_build2 (GE_EXPR
, type
, arg
,
5840 build_real (TREE_TYPE (arg
),
5842 fold_build2 (code
, type
, arg
,
5843 build_real (TREE_TYPE (arg
),
5852 /* Subroutine of fold() that optimizes comparisons against Infinities,
5853 either +Inf or -Inf.
5855 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5856 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5857 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5859 The function returns the constant folded tree if a simplification
5860 can be made, and NULL_TREE otherwise. */
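/* Illustrative examples (double used only for concreteness):
   "x < +Inf" becomes "x <= DBL_MAX", "x >= +Inf" becomes
   "x > DBL_MAX", and for -Inf the sense of the comparison is swapped
   first, as done below.  */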
5863 fold_inf_compare (enum tree_code code
, tree type
, tree arg0
, tree arg1
)
5865 enum machine_mode mode
;
5866 REAL_VALUE_TYPE max
;
5870 mode
= TYPE_MODE (TREE_TYPE (arg0
));
5872 /* For negative infinity swap the sense of the comparison. */
5873 neg
= REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
));
5875 code
= swap_tree_comparison (code
);
/* x > +Inf is always false, if we ignore sNaNs.  */
5881 if (HONOR_SNANS (mode
))
5883 return omit_one_operand (type
, integer_zero_node
, arg0
);
/* x <= +Inf is always true, if we don't care about NaNs.  */
5887 if (! HONOR_NANS (mode
))
5888 return omit_one_operand (type
, integer_one_node
, arg0
);
5890 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
5891 if (lang_hooks
.decls
.global_bindings_p () == 0
5892 && ! CONTAINS_PLACEHOLDER_P (arg0
))
5894 arg0
= save_expr (arg0
);
5895 return fold_build2 (EQ_EXPR
, type
, arg0
, arg0
);
5901 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5902 real_maxval (&max
, neg
, mode
);
5903 return fold_build2 (neg
? LT_EXPR
: GT_EXPR
, type
,
5904 arg0
, build_real (TREE_TYPE (arg0
), max
));
5907 /* x < +Inf is always equal to x <= DBL_MAX. */
5908 real_maxval (&max
, neg
, mode
);
5909 return fold_build2 (neg
? GE_EXPR
: LE_EXPR
, type
,
5910 arg0
, build_real (TREE_TYPE (arg0
), max
));
5913 /* x != +Inf is always equal to !(x > DBL_MAX). */
5914 real_maxval (&max
, neg
, mode
);
5915 if (! HONOR_NANS (mode
))
5916 return fold_build2 (neg
? GE_EXPR
: LE_EXPR
, type
,
5917 arg0
, build_real (TREE_TYPE (arg0
), max
));
5919 /* The transformation below creates non-gimple code and thus is
5920 not appropriate if we are in gimple form. */
5924 temp
= fold_build2 (neg
? LT_EXPR
: GT_EXPR
, type
,
5925 arg0
, build_real (TREE_TYPE (arg0
), max
));
5926 return fold_build1 (TRUTH_NOT_EXPR
, type
, temp
);
5935 /* Subroutine of fold() that optimizes comparisons of a division by
5936 a nonzero integer constant against an integer constant, i.e.
5939 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5940 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
are the operands of the comparison.  ARG1 must be an INTEGER_CST.
5943 The function returns the constant folded tree if a simplification
5944 can be made, and NULL_TREE otherwise. */
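/* A concrete instance (constants chosen only for illustration): for
   unsigned x, "x / 4 == 2" holds exactly when x is in [8, 11], so it
   is folded below into a single range check built by
   build_range_check; if computing a bound overflows, the result
   degenerates to a simpler comparison or a constant.  */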
5947 fold_div_compare (enum tree_code code
, tree type
, tree arg0
, tree arg1
)
tree prod, tmp, hi, lo;
tree arg00 = TREE_OPERAND (arg0, 0);
tree arg01 = TREE_OPERAND (arg0, 1);
unsigned HOST_WIDE_INT lpart;
HOST_WIDE_INT hpart;
5956 /* We have to do this the hard way to detect unsigned overflow.
5957 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5958 overflow
= mul_double (TREE_INT_CST_LOW (arg01
),
5959 TREE_INT_CST_HIGH (arg01
),
5960 TREE_INT_CST_LOW (arg1
),
5961 TREE_INT_CST_HIGH (arg1
), &lpart
, &hpart
);
5962 prod
= build_int_cst_wide (TREE_TYPE (arg00
), lpart
, hpart
);
5963 prod
= force_fit_type (prod
, -1, overflow
, false);
5965 if (TYPE_UNSIGNED (TREE_TYPE (arg0
)))
5967 tmp
= int_const_binop (MINUS_EXPR
, arg01
, integer_one_node
, 0);
5970 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5971 overflow
= add_double (TREE_INT_CST_LOW (prod
),
5972 TREE_INT_CST_HIGH (prod
),
5973 TREE_INT_CST_LOW (tmp
),
5974 TREE_INT_CST_HIGH (tmp
),
5976 hi
= build_int_cst_wide (TREE_TYPE (arg00
), lpart
, hpart
);
5977 hi
= force_fit_type (hi
, -1, overflow
| TREE_OVERFLOW (prod
),
5978 TREE_CONSTANT_OVERFLOW (prod
));
5980 else if (tree_int_cst_sgn (arg01
) >= 0)
5982 tmp
= int_const_binop (MINUS_EXPR
, arg01
, integer_one_node
, 0);
5983 switch (tree_int_cst_sgn (arg1
))
5986 lo
= int_const_binop (MINUS_EXPR
, prod
, tmp
, 0);
5991 lo
= fold_negate_const (tmp
, TREE_TYPE (arg0
));
5996 hi
= int_const_binop (PLUS_EXPR
, prod
, tmp
, 0);
6006 /* A negative divisor reverses the relational operators. */
6007 code
= swap_tree_comparison (code
);
6009 tmp
= int_const_binop (PLUS_EXPR
, arg01
, integer_one_node
, 0);
6010 switch (tree_int_cst_sgn (arg1
))
6013 hi
= int_const_binop (MINUS_EXPR
, prod
, tmp
, 0);
6018 hi
= fold_negate_const (tmp
, TREE_TYPE (arg0
));
6023 lo
= int_const_binop (PLUS_EXPR
, prod
, tmp
, 0);
6035 if (TREE_OVERFLOW (lo
) && TREE_OVERFLOW (hi
))
6036 return omit_one_operand (type
, integer_zero_node
, arg00
);
6037 if (TREE_OVERFLOW (hi
))
6038 return fold_build2 (GE_EXPR
, type
, arg00
, lo
);
6039 if (TREE_OVERFLOW (lo
))
6040 return fold_build2 (LE_EXPR
, type
, arg00
, hi
);
6041 return build_range_check (type
, arg00
, 1, lo
, hi
);
6044 if (TREE_OVERFLOW (lo
) && TREE_OVERFLOW (hi
))
6045 return omit_one_operand (type
, integer_one_node
, arg00
);
6046 if (TREE_OVERFLOW (hi
))
6047 return fold_build2 (LT_EXPR
, type
, arg00
, lo
);
6048 if (TREE_OVERFLOW (lo
))
6049 return fold_build2 (GT_EXPR
, type
, arg00
, hi
);
6050 return build_range_check (type
, arg00
, 0, lo
, hi
);
6053 if (TREE_OVERFLOW (lo
))
6054 return omit_one_operand (type
, integer_zero_node
, arg00
);
6055 return fold_build2 (LT_EXPR
, type
, arg00
, lo
);
6058 if (TREE_OVERFLOW (hi
))
6059 return omit_one_operand (type
, integer_one_node
, arg00
);
6060 return fold_build2 (LE_EXPR
, type
, arg00
, hi
);
6063 if (TREE_OVERFLOW (hi
))
6064 return omit_one_operand (type
, integer_zero_node
, arg00
);
6065 return fold_build2 (GT_EXPR
, type
, arg00
, hi
);
6068 if (TREE_OVERFLOW (lo
))
6069 return omit_one_operand (type
, integer_one_node
, arg00
);
6070 return fold_build2 (GE_EXPR
, type
, arg00
, lo
);
6080 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6081 equality/inequality test, then return a simplified form of the test
using a sign test.  Otherwise return NULL.  TYPE is the desired
result type.  */
6086 fold_single_bit_test_into_sign_test (enum tree_code code
, tree arg0
, tree arg1
,
6089 /* If this is testing a single bit, we can optimize the test. */
6090 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
6091 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
6092 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
6094 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6095 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6096 tree arg00
= sign_bit_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg0
, 1));
6098 if (arg00
!= NULL_TREE
6099 /* This is only a win if casting to a signed type is cheap,
6100 i.e. when arg00's type is not a partial mode. */
6101 && TYPE_PRECISION (TREE_TYPE (arg00
))
6102 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00
))))
6104 tree stype
= lang_hooks
.types
.signed_type (TREE_TYPE (arg00
));
6105 return fold_build2 (code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
,
6106 result_type
, fold_convert (stype
, arg00
),
6107 fold_convert (stype
, integer_zero_node
));
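
/* Illustrative sketch (not part of the original sources): when the single
   bit being tested is the sign bit, the AND disappears in favor of a signed
   comparison with zero.  Assuming a 32-bit int on a two's-complement
   target, the two hypothetical helpers below compute the same value.  */
static int sign_bit_test_before (unsigned int a) { return (a & 0x80000000u) != 0; }
static int sign_bit_test_after (unsigned int a) { return (int) a < 0; }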
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */

tree
fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
                      tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem;

      /* First, see if we can fold the single bit test into a sign-bit
         test.  */
      tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
                                                 result_type);
      if (tem)
        return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
         convert that into ((A >> C2) & 1).  Where C2 = log2(C).
         Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && bitnum < TYPE_PRECISION (type)
          && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
                                   bitnum - TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
                      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert (intermediate_type, inner);

      if (bitnum != 0)
        inner = build2 (RSHIFT_EXPR, intermediate_type,
                        inner, size_int (bitnum));

      if (code == EQ_EXPR)
        inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
                             inner, integer_one_node);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type,
                      inner, integer_one_node);

      /* Make sure to return the proper type.  */
      inner = fold_convert (result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
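
/* Illustrative sketch (not part of the original sources): for a bit other
   than the sign bit, the (A & C) != 0 form becomes a shift of that bit down
   to position zero followed by a mask with 1, e.g. for bit 3.  */
static int single_bit_test_before (unsigned int a) { return (a & 8u) != 0; }
static int single_bit_test_after (unsigned int a) { return (a >> 3) & 1u; }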
/* Check whether we are allowed to reorder operands arg0 and arg1,
   such that the evaluation of arg1 occurs before arg0.  */

static bool
reorder_operands_p (tree arg0, tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
         && ! TREE_SIDE_EFFECTS (arg1);
}
/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */

bool
tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
{
  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return 0;
  if (TREE_CODE (arg0) == INTEGER_CST)
    return 1;

  if (TREE_CODE (arg1) == REAL_CST)
    return 0;
  if (TREE_CODE (arg0) == REAL_CST)
    return 1;

  if (TREE_CODE (arg1) == COMPLEX_CST)
    return 0;
  if (TREE_CODE (arg0) == COMPLEX_CST)
    return 1;

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  /* It is preferable to swap two SSA_NAME to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return 1;

  return 0;
}
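
/* Illustrative note (not part of the original sources): for commutative
   operators, fold_binary uses this predicate to rewrite e.g. "5 + x" as
   "x + 5", so constants and otherwise "simple" operands land in the second
   position and later patterns only need to match one ordering.  */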
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
   ARG0 is extended to a wider type.  */

static tree
fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree arg0_unw = get_unwidened (arg0, NULL_TREE);
  tree arg1_unw;
  tree shorter_type, outer_type;
  tree min, max;
  bool above, below;

  if (arg0_unw == arg0)
    return NULL_TREE;
  shorter_type = TREE_TYPE (arg0_unw);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (shorter_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
    return NULL_TREE;

  arg1_unw = get_unwidened (arg1, shorter_type);

  /* If possible, express the comparison in the shorter mode.  */
  if ((code == EQ_EXPR || code == NE_EXPR
       || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
      && (TREE_TYPE (arg1_unw) == shorter_type
          || (TREE_CODE (arg1_unw) == INTEGER_CST
              && (TREE_CODE (shorter_type) == INTEGER_TYPE
                  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
              && int_fits_type_p (arg1_unw, shorter_type))))
    return fold_build2 (code, type, arg0_unw,
                        fold_convert (shorter_type, arg1_unw));

  if (TREE_CODE (arg1_unw) != INTEGER_CST
      || TREE_CODE (shorter_type) != INTEGER_TYPE
      || !int_fits_type_p (arg1_unw, shorter_type))
    return NULL_TREE;

  /* If we are comparing with the integer that does not fit into the range
     of the shorter type, the result is known.  */
  outer_type = TREE_TYPE (arg1_unw);
  min = lower_bound_in_type (outer_type, shorter_type);
  max = upper_bound_in_type (outer_type, shorter_type);

  above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
                                                   max, arg1_unw));
  below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
                                                   arg1_unw, min));

  switch (code)
    {
    case EQ_EXPR:
      if (above || below)
        return omit_one_operand (type, integer_zero_node, arg0);
      break;

    case NE_EXPR:
      if (above || below)
        return omit_one_operand (type, integer_one_node, arg0);
      break;

    case LT_EXPR:
    case LE_EXPR:
      if (above)
        return omit_one_operand (type, integer_one_node, arg0);
      else if (below)
        return omit_one_operand (type, integer_zero_node, arg0);
      break;

    case GT_EXPR:
    case GE_EXPR:
      if (above)
        return omit_one_operand (type, integer_zero_node, arg0);
      else if (below)
        return omit_one_operand (type, integer_one_node, arg0);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
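
/* Illustrative note (not part of the original sources): assuming a 16-bit
   short and a 32-bit int, a comparison such as "(int) s == 70000" with S of
   type short folds to constant false, and "(int) s < 70000" folds to
   constant true, because 70000 lies outside the range of the narrower
   type.  */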
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
   ARG0 just the signedness is changed.  */

static tree
fold_sign_changed_comparison (enum tree_code code, tree type,
                              tree arg0, tree arg1)
{
  tree arg0_inner, tmp;
  tree inner_type, outer_type;

  if (TREE_CODE (arg0) != NOP_EXPR
      && TREE_CODE (arg0) != CONVERT_EXPR)
    return NULL_TREE;

  outer_type = TREE_TYPE (arg0);
  arg0_inner = TREE_OPERAND (arg0, 0);
  inner_type = TREE_TYPE (arg0_inner);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (inner_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) != INTEGER_CST
      && !((TREE_CODE (arg1) == NOP_EXPR
            || TREE_CODE (arg1) == CONVERT_EXPR)
           && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
    return NULL_TREE;

  if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
      && code != NE_EXPR
      && code != EQ_EXPR)
    return NULL_TREE;

  if (TREE_CODE (arg1) == INTEGER_CST)
    {
      tmp = build_int_cst_wide (inner_type,
                                TREE_INT_CST_LOW (arg1),
                                TREE_INT_CST_HIGH (arg1));
      arg1 = force_fit_type (tmp, 0,
                             TREE_OVERFLOW (arg1),
                             TREE_CONSTANT_OVERFLOW (arg1));
    }
  else
    arg1 = fold_convert (inner_type, arg1);

  return fold_build2 (code, type, arg0_inner, arg1);
}
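
/* Illustrative sketch (not part of the original sources): an equality test
   performed in a type that differs only in signedness can be done on the
   inner operand directly.  The hypothetical helper below has the same value
   as "x == 5" on any target where int is 32 bits wide.  */
static int sign_changed_compare (int x) { return (unsigned int) x == 5u; }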
/* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
   the step of the array.  Reconstructs s and delta in the case of s * delta
   being an integer constant (and thus already folded).
   ADDR is the address.  MULT is the multiplicative expression.
   If the function succeeds, the new address expression is returned.  Otherwise
   NULL_TREE is returned.  */

static tree
try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
{
  tree s, delta, step;
  tree ref = TREE_OPERAND (addr, 0), pref;
  tree ret, pos;
  tree itype;

  /* Canonicalize op1 into a possibly non-constant delta
     and an INTEGER_CST s.  */
  if (TREE_CODE (op1) == MULT_EXPR)
    {
      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == INTEGER_CST)
        {
          s = arg0;
          delta = arg1;
        }
      else if (TREE_CODE (arg1) == INTEGER_CST)
        {
          s = arg1;
          delta = arg0;
        }
      else
        return NULL_TREE;
    }
  else if (TREE_CODE (op1) == INTEGER_CST)
    {
      delta = op1;
      s = NULL_TREE;
    }
  else
    {
      /* Simulate we are delta * 1.  */
      delta = op1;
      s = integer_one_node;
    }

  for (;; ref = TREE_OPERAND (ref, 0))
    {
      if (TREE_CODE (ref) == ARRAY_REF)
        {
          itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
          if (! itype)
            continue;

          step = array_ref_element_size (ref);
          if (TREE_CODE (step) != INTEGER_CST)
            continue;

          if (s)
            {
              if (! tree_int_cst_equal (step, s))
                continue;
            }
          else
            {
              /* Try if delta is a multiple of step.  */
              tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
              if (! tmp)
                continue;
              delta = tmp;
            }

          break;
        }

      if (!handled_component_p (ref))
        return NULL_TREE;
    }

  /* We found the suitable array reference.  So copy everything up to it,
     and replace the index.  */

  pref = TREE_OPERAND (addr, 0);
  ret = copy_node (pref);
  pos = ret;

  while (pref != ref)
    {
      pref = TREE_OPERAND (pref, 0);
      TREE_OPERAND (pos, 0) = copy_node (pref);
      pos = TREE_OPERAND (pos, 0);
    }

  TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
                                       fold_convert (itype,
                                                     TREE_OPERAND (pos, 1)),
                                       fold_convert (itype, delta));

  return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
}
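
/* Illustrative sketch (not part of the original sources): assuming
   sizeof (int) == 4, the two hypothetical helpers below address the same
   element; the second is the form this function rebuilds.  */
static int *move_mult_before (int *a, int i, long j)
{ return (int *) ((char *) &a[i] + 4 * j); }
static int *move_mult_after (int *a, int i, long j)
{ return &a[i + j]; }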
/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
   means A >= Y && A != MAX, but in this case we know that
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */

static tree
fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
{
  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;

  if (TREE_CODE (bound) == LT_EXPR)
    a = TREE_OPERAND (bound, 0);
  else if (TREE_CODE (bound) == GT_EXPR)
    a = TREE_OPERAND (bound, 1);
  else
    return NULL_TREE;

  typea = TREE_TYPE (a);
  if (!INTEGRAL_TYPE_P (typea)
      && !POINTER_TYPE_P (typea))
    return NULL_TREE;

  if (TREE_CODE (ineq) == LT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 1);
      y = TREE_OPERAND (ineq, 0);
    }
  else if (TREE_CODE (ineq) == GT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 0);
      y = TREE_OPERAND (ineq, 1);
    }
  else
    return NULL_TREE;

  if (TREE_TYPE (a1) != typea)
    return NULL_TREE;

  diff = fold_build2 (MINUS_EXPR, typea, a1, a);
  if (!integer_onep (diff))
    return NULL_TREE;

  return fold_build2 (GE_EXPR, type, a, y);
}
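
/* Illustrative note (not part of the original sources): under the bound
   A < X, the value A + 1 cannot wrap past the maximum, so the strict test
   "a + 1 > y" in "a < x && a + 1 > y" may be relaxed to "a >= y", giving
   "a < x && a >= y".  */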
/* Fold a unary expression of code CODE and type TYPE with operand
   OP0.  Return the folded expression if folding is successful.
   Otherwise, return NULL_TREE.  */

tree
fold_unary (enum tree_code code, tree type, tree op0)
{
  tree tem;
  tree arg0;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 1);
6576 if (code
== NOP_EXPR
|| code
== CONVERT_EXPR
6577 || code
== FLOAT_EXPR
|| code
== ABS_EXPR
)
      /* Don't use STRIP_NOPS, because signedness of argument type
         matters.  */
      STRIP_SIGN_NOPS (arg0);
6585 /* Strip any conversions that don't change the mode. This
6586 is safe for every expression, except for a comparison
6587 expression because its signedness is derived from its
6590 Note that this is done as an internal manipulation within
6591 the constant folder, in order to find the simplest
6592 representation of the arguments so that their form can be
6593 studied. In any cases, the appropriate type conversions
6594 should be put back in the tree that will get out of the
6600 if (TREE_CODE_CLASS (code
) == tcc_unary
)
6602 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
6603 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
6604 fold_build1 (code
, type
, TREE_OPERAND (arg0
, 1)));
6605 else if (TREE_CODE (arg0
) == COND_EXPR
)
6607 tree arg01
= TREE_OPERAND (arg0
, 1);
6608 tree arg02
= TREE_OPERAND (arg0
, 2);
6609 if (! VOID_TYPE_P (TREE_TYPE (arg01
)))
6610 arg01
= fold_build1 (code
, type
, arg01
);
6611 if (! VOID_TYPE_P (TREE_TYPE (arg02
)))
6612 arg02
= fold_build1 (code
, type
, arg02
);
6613 tem
= fold_build3 (COND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
6616 /* If this was a conversion, and all we did was to move into
6617 inside the COND_EXPR, bring it back out. But leave it if
6618 it is a conversion from integer to integer and the
6619 result precision is no wider than a word since such a
6620 conversion is cheap and may be optimized away by combine,
6621 while it couldn't if it were outside the COND_EXPR. Then return
6622 so we don't get into an infinite recursion loop taking the
6623 conversion out and then back in. */
6625 if ((code
== NOP_EXPR
|| code
== CONVERT_EXPR
6626 || code
== NON_LVALUE_EXPR
)
6627 && TREE_CODE (tem
) == COND_EXPR
6628 && TREE_CODE (TREE_OPERAND (tem
, 1)) == code
6629 && TREE_CODE (TREE_OPERAND (tem
, 2)) == code
6630 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 1))
6631 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 2))
6632 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))
6633 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)))
6634 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
6636 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))))
6637 && TYPE_PRECISION (TREE_TYPE (tem
)) <= BITS_PER_WORD
)
6638 || flag_syntax_only
))
6639 tem
= build1 (code
, type
,
6641 TREE_TYPE (TREE_OPERAND
6642 (TREE_OPERAND (tem
, 1), 0)),
6643 TREE_OPERAND (tem
, 0),
6644 TREE_OPERAND (TREE_OPERAND (tem
, 1), 0),
6645 TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)));
6648 else if (COMPARISON_CLASS_P (arg0
))
6650 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
6652 arg0
= copy_node (arg0
);
6653 TREE_TYPE (arg0
) = type
;
6656 else if (TREE_CODE (type
) != INTEGER_TYPE
)
6657 return fold_build3 (COND_EXPR
, type
, arg0
,
6658 fold_build1 (code
, type
,
6660 fold_build1 (code
, type
,
6661 integer_zero_node
));
6670 case FIX_TRUNC_EXPR
:
6672 case FIX_FLOOR_EXPR
:
6673 case FIX_ROUND_EXPR
:
6674 if (TREE_TYPE (op0
) == type
)
6677 /* Handle cases of two conversions in a row. */
6678 if (TREE_CODE (op0
) == NOP_EXPR
6679 || TREE_CODE (op0
) == CONVERT_EXPR
)
6681 tree inside_type
= TREE_TYPE (TREE_OPERAND (op0
, 0));
6682 tree inter_type
= TREE_TYPE (op0
);
6683 int inside_int
= INTEGRAL_TYPE_P (inside_type
);
6684 int inside_ptr
= POINTER_TYPE_P (inside_type
);
6685 int inside_float
= FLOAT_TYPE_P (inside_type
);
6686 int inside_vec
= TREE_CODE (inside_type
) == VECTOR_TYPE
;
6687 unsigned int inside_prec
= TYPE_PRECISION (inside_type
);
6688 int inside_unsignedp
= TYPE_UNSIGNED (inside_type
);
6689 int inter_int
= INTEGRAL_TYPE_P (inter_type
);
6690 int inter_ptr
= POINTER_TYPE_P (inter_type
);
6691 int inter_float
= FLOAT_TYPE_P (inter_type
);
6692 int inter_vec
= TREE_CODE (inter_type
) == VECTOR_TYPE
;
6693 unsigned int inter_prec
= TYPE_PRECISION (inter_type
);
6694 int inter_unsignedp
= TYPE_UNSIGNED (inter_type
);
6695 int final_int
= INTEGRAL_TYPE_P (type
);
6696 int final_ptr
= POINTER_TYPE_P (type
);
6697 int final_float
= FLOAT_TYPE_P (type
);
6698 int final_vec
= TREE_CODE (type
) == VECTOR_TYPE
;
6699 unsigned int final_prec
= TYPE_PRECISION (type
);
6700 int final_unsignedp
= TYPE_UNSIGNED (type
);
6702 /* In addition to the cases of two conversions in a row
6703 handled below, if we are converting something to its own
6704 type via an object of identical or wider precision, neither
6705 conversion is needed. */
6706 if (TYPE_MAIN_VARIANT (inside_type
) == TYPE_MAIN_VARIANT (type
)
6707 && ((inter_int
&& final_int
) || (inter_float
&& final_float
))
6708 && inter_prec
>= final_prec
)
6709 return fold_build1 (code
, type
, TREE_OPERAND (op0
, 0));
6711 /* Likewise, if the intermediate and final types are either both
6712 float or both integer, we don't need the middle conversion if
6713 it is wider than the final type and doesn't change the signedness
6714 (for integers). Avoid this if the final type is a pointer
6715 since then we sometimes need the inner conversion. Likewise if
6716 the outer has a precision not equal to the size of its mode. */
6717 if ((((inter_int
|| inter_ptr
) && (inside_int
|| inside_ptr
))
6718 || (inter_float
&& inside_float
)
6719 || (inter_vec
&& inside_vec
))
6720 && inter_prec
>= inside_prec
6721 && (inter_float
|| inter_vec
6722 || inter_unsignedp
== inside_unsignedp
)
6723 && ! (final_prec
!= GET_MODE_BITSIZE (TYPE_MODE (type
))
6724 && TYPE_MODE (type
) == TYPE_MODE (inter_type
))
6726 && (! final_vec
|| inter_prec
== inside_prec
))
6727 return fold_build1 (code
, type
, TREE_OPERAND (op0
, 0));
6729 /* If we have a sign-extension of a zero-extended value, we can
6730 replace that by a single zero-extension. */
6731 if (inside_int
&& inter_int
&& final_int
6732 && inside_prec
< inter_prec
&& inter_prec
< final_prec
6733 && inside_unsignedp
&& !inter_unsignedp
)
6734 return fold_build1 (code
, type
, TREE_OPERAND (op0
, 0));
6736 /* Two conversions in a row are not needed unless:
6737 - some conversion is floating-point (overstrict for now), or
6738 - some conversion is a vector (overstrict for now), or
6739 - the intermediate type is narrower than both initial and
6741 - the intermediate type and innermost type differ in signedness,
6742 and the outermost type is wider than the intermediate, or
6743 - the initial type is a pointer type and the precisions of the
6744 intermediate and final types differ, or
6745 - the final type is a pointer type and the precisions of the
6746 initial and intermediate types differ. */
6747 if (! inside_float
&& ! inter_float
&& ! final_float
6748 && ! inside_vec
&& ! inter_vec
&& ! final_vec
6749 && (inter_prec
> inside_prec
|| inter_prec
> final_prec
)
6750 && ! (inside_int
&& inter_int
6751 && inter_unsignedp
!= inside_unsignedp
6752 && inter_prec
< final_prec
)
6753 && ((inter_unsignedp
&& inter_prec
> inside_prec
)
6754 == (final_unsignedp
&& final_prec
> inter_prec
))
6755 && ! (inside_ptr
&& inter_prec
!= final_prec
)
6756 && ! (final_ptr
&& inside_prec
!= inter_prec
)
6757 && ! (final_prec
!= GET_MODE_BITSIZE (TYPE_MODE (type
))
6758 && TYPE_MODE (type
) == TYPE_MODE (inter_type
))
6760 return fold_build1 (code
, type
, TREE_OPERAND (op0
, 0));
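
            /* Illustrative note (not part of the original sources): by the
               rules above, a chain such as (int) (long) i with I already an
               int collapses to plain I when long is at least as wide, while
               (int) (char) i must keep the intermediate truncation.  */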
6763 /* Handle (T *)&A.B.C for A being of type T and B and C
6764 living at offset zero. This occurs frequently in
6765 C++ upcasting and then accessing the base. */
6766 if (TREE_CODE (op0
) == ADDR_EXPR
6767 && POINTER_TYPE_P (type
)
6768 && handled_component_p (TREE_OPERAND (op0
, 0)))
6770 HOST_WIDE_INT bitsize
, bitpos
;
6772 enum machine_mode mode
;
6773 int unsignedp
, volatilep
;
6774 tree base
= TREE_OPERAND (op0
, 0);
6775 base
= get_inner_reference (base
, &bitsize
, &bitpos
, &offset
,
6776 &mode
, &unsignedp
, &volatilep
, false);
6777 /* If the reference was to a (constant) zero offset, we can use
6778 the address of the base if it has the same base type
6779 as the result type. */
6780 if (! offset
&& bitpos
== 0
6781 && TYPE_MAIN_VARIANT (TREE_TYPE (type
))
6782 == TYPE_MAIN_VARIANT (TREE_TYPE (base
)))
6783 return fold_convert (type
, build_fold_addr_expr (base
));
6786 if (TREE_CODE (op0
) == MODIFY_EXPR
6787 && TREE_CONSTANT (TREE_OPERAND (op0
, 1))
6788 /* Detect assigning a bitfield. */
6789 && !(TREE_CODE (TREE_OPERAND (op0
, 0)) == COMPONENT_REF
6790 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0
, 0), 1))))
6792 /* Don't leave an assignment inside a conversion
6793 unless assigning a bitfield. */
6794 tem
= fold_build1 (code
, type
, TREE_OPERAND (op0
, 1));
6795 /* First do the assignment, then return converted constant. */
6796 tem
= build2 (COMPOUND_EXPR
, TREE_TYPE (tem
), op0
, tem
);
6797 TREE_NO_WARNING (tem
) = 1;
6798 TREE_USED (tem
) = 1;
6802 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6803 constants (if x has signed type, the sign bit cannot be set
6804 in c). This folds extension into the BIT_AND_EXPR. */
6805 if (INTEGRAL_TYPE_P (type
)
6806 && TREE_CODE (type
) != BOOLEAN_TYPE
6807 && TREE_CODE (op0
) == BIT_AND_EXPR
6808 && TREE_CODE (TREE_OPERAND (op0
, 1)) == INTEGER_CST
)
6811 tree and0
= TREE_OPERAND (and, 0), and1
= TREE_OPERAND (and, 1);
6814 if (TYPE_UNSIGNED (TREE_TYPE (and))
6815 || (TYPE_PRECISION (type
)
6816 <= TYPE_PRECISION (TREE_TYPE (and))))
6818 else if (TYPE_PRECISION (TREE_TYPE (and1
))
6819 <= HOST_BITS_PER_WIDE_INT
6820 && host_integerp (and1
, 1))
6822 unsigned HOST_WIDE_INT cst
;
6824 cst
= tree_low_cst (and1
, 1);
6825 cst
&= (HOST_WIDE_INT
) -1
6826 << (TYPE_PRECISION (TREE_TYPE (and1
)) - 1);
6827 change
= (cst
== 0);
6828 #ifdef LOAD_EXTEND_OP
6830 && !flag_syntax_only
6831 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0
)))
6834 tree uns
= lang_hooks
.types
.unsigned_type (TREE_TYPE (and0
));
6835 and0
= fold_convert (uns
, and0
);
6836 and1
= fold_convert (uns
, and1
);
6842 tem
= build_int_cst_wide (type
, TREE_INT_CST_LOW (and1
),
6843 TREE_INT_CST_HIGH (and1
));
6844 tem
= force_fit_type (tem
, 0, TREE_OVERFLOW (and1
),
6845 TREE_CONSTANT_OVERFLOW (and1
));
6846 return fold_build2 (BIT_AND_EXPR
, type
,
6847 fold_convert (type
, and0
), tem
);
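
          /* Illustrative note (not part of the original sources): e.g. for
             an unsigned short US, (unsigned int) (us & 0x7f) becomes
             (unsigned int) us & 0x7f, folding the widening conversion into
             the BIT_AND_EXPR.  */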
6851 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
6852 T2 being pointers to types of the same size. */
6853 if (POINTER_TYPE_P (type
)
6854 && BINARY_CLASS_P (arg0
)
6855 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == NOP_EXPR
6856 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0
, 0))))
6858 tree arg00
= TREE_OPERAND (arg0
, 0);
6860 tree t1
= TREE_TYPE (arg00
);
6861 tree tt0
= TREE_TYPE (t0
);
6862 tree tt1
= TREE_TYPE (t1
);
6863 tree s0
= TYPE_SIZE (tt0
);
6864 tree s1
= TYPE_SIZE (tt1
);
6866 if (s0
&& s1
&& operand_equal_p (s0
, s1
, OEP_ONLY_CONST
))
6867 return build2 (TREE_CODE (arg0
), t0
, fold_convert (t0
, arg00
),
6868 TREE_OPERAND (arg0
, 1));
6871 tem
= fold_convert_const (code
, type
, arg0
);
6872 return tem
? tem
: NULL_TREE
;
6874 case VIEW_CONVERT_EXPR
:
6875 if (TREE_CODE (op0
) == VIEW_CONVERT_EXPR
)
6876 return build1 (VIEW_CONVERT_EXPR
, type
, TREE_OPERAND (op0
, 0));
6880 if (negate_expr_p (arg0
))
6881 return fold_convert (type
, negate_expr (arg0
));
6882 /* Convert - (~A) to A + 1. */
6883 if (INTEGRAL_TYPE_P (type
) && TREE_CODE (arg0
) == BIT_NOT_EXPR
)
6884 return fold_build2 (PLUS_EXPR
, type
, TREE_OPERAND (arg0
, 0),
6885 build_int_cst (type
, 1));
6889 if (TREE_CODE (arg0
) == INTEGER_CST
|| TREE_CODE (arg0
) == REAL_CST
)
6890 return fold_abs_const (arg0
, type
);
6891 else if (TREE_CODE (arg0
) == NEGATE_EXPR
)
6892 return fold_build1 (ABS_EXPR
, type
, TREE_OPERAND (arg0
, 0));
6893 /* Convert fabs((double)float) into (double)fabsf(float). */
6894 else if (TREE_CODE (arg0
) == NOP_EXPR
6895 && TREE_CODE (type
) == REAL_TYPE
)
6897 tree targ0
= strip_float_extensions (arg0
);
6899 return fold_convert (type
, fold_build1 (ABS_EXPR
,
6903 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
6904 else if (tree_expr_nonnegative_p (arg0
) || TREE_CODE (arg0
) == ABS_EXPR
)
6907 /* Strip sign ops from argument. */
6908 if (TREE_CODE (type
) == REAL_TYPE
)
6910 tem
= fold_strip_sign_ops (arg0
);
6912 return fold_build1 (ABS_EXPR
, type
, fold_convert (type
, tem
));
6917 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
6918 return fold_convert (type
, arg0
);
6919 else if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
6920 return build2 (COMPLEX_EXPR
, type
,
6921 TREE_OPERAND (arg0
, 0),
6922 negate_expr (TREE_OPERAND (arg0
, 1)));
6923 else if (TREE_CODE (arg0
) == COMPLEX_CST
)
6924 return build_complex (type
, TREE_REALPART (arg0
),
6925 negate_expr (TREE_IMAGPART (arg0
)));
6926 else if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
6927 return fold_build2 (TREE_CODE (arg0
), type
,
6928 fold_build1 (CONJ_EXPR
, type
,
6929 TREE_OPERAND (arg0
, 0)),
6930 fold_build1 (CONJ_EXPR
, type
,
6931 TREE_OPERAND (arg0
, 1)));
6932 else if (TREE_CODE (arg0
) == CONJ_EXPR
)
6933 return TREE_OPERAND (arg0
, 0);
6937 if (TREE_CODE (arg0
) == INTEGER_CST
)
6938 return fold_not_const (arg0
, type
);
6939 else if (TREE_CODE (arg0
) == BIT_NOT_EXPR
)
6940 return TREE_OPERAND (arg0
, 0);
6941 /* Convert ~ (-A) to A - 1. */
6942 else if (INTEGRAL_TYPE_P (type
) && TREE_CODE (arg0
) == NEGATE_EXPR
)
6943 return fold_build2 (MINUS_EXPR
, type
, TREE_OPERAND (arg0
, 0),
6944 build_int_cst (type
, 1));
6945 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
6946 else if (INTEGRAL_TYPE_P (type
)
6947 && ((TREE_CODE (arg0
) == MINUS_EXPR
6948 && integer_onep (TREE_OPERAND (arg0
, 1)))
6949 || (TREE_CODE (arg0
) == PLUS_EXPR
6950 && integer_all_onesp (TREE_OPERAND (arg0
, 1)))))
6951 return fold_build1 (NEGATE_EXPR
, type
, TREE_OPERAND (arg0
, 0));
6952 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
6953 else if (TREE_CODE (arg0
) == BIT_XOR_EXPR
6954 && (tem
= fold_unary (BIT_NOT_EXPR
, type
,
6956 TREE_OPERAND (arg0
, 0)))))
6957 return fold_build2 (BIT_XOR_EXPR
, type
, tem
,
6958 fold_convert (type
, TREE_OPERAND (arg0
, 1)));
6959 else if (TREE_CODE (arg0
) == BIT_XOR_EXPR
6960 && (tem
= fold_unary (BIT_NOT_EXPR
, type
,
6962 TREE_OPERAND (arg0
, 1)))))
6963 return fold_build2 (BIT_XOR_EXPR
, type
,
6964 fold_convert (type
, TREE_OPERAND (arg0
, 0)), tem
);
6968 case TRUTH_NOT_EXPR
:
6969 /* The argument to invert_truthvalue must have Boolean type. */
6970 if (TREE_CODE (TREE_TYPE (arg0
)) != BOOLEAN_TYPE
)
6971 arg0
= fold_convert (boolean_type_node
, arg0
);
6973 /* Note that the operand of this must be an int
6974 and its values must be 0 or 1.
6975 ("true" is a fixed value perhaps depending on the language,
6976 but we don't handle values other than 1 correctly yet.) */
6977 tem
= invert_truthvalue (arg0
);
6978 /* Avoid infinite recursion. */
6979 if (TREE_CODE (tem
) == TRUTH_NOT_EXPR
)
6981 return fold_convert (type
, tem
);
6984 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
6986 else if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
6987 return omit_one_operand (type
, TREE_OPERAND (arg0
, 0),
6988 TREE_OPERAND (arg0
, 1));
6989 else if (TREE_CODE (arg0
) == COMPLEX_CST
)
6990 return TREE_REALPART (arg0
);
6991 else if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
6992 return fold_build2 (TREE_CODE (arg0
), type
,
6993 fold_build1 (REALPART_EXPR
, type
,
6994 TREE_OPERAND (arg0
, 0)),
6995 fold_build1 (REALPART_EXPR
, type
,
6996 TREE_OPERAND (arg0
, 1)));
7000 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
7001 return fold_convert (type
, integer_zero_node
);
7002 else if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
7003 return omit_one_operand (type
, TREE_OPERAND (arg0
, 1),
7004 TREE_OPERAND (arg0
, 0));
7005 else if (TREE_CODE (arg0
) == COMPLEX_CST
)
7006 return TREE_IMAGPART (arg0
);
7007 else if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
7008 return fold_build2 (TREE_CODE (arg0
), type
,
7009 fold_build1 (IMAGPART_EXPR
, type
,
7010 TREE_OPERAND (arg0
, 0)),
7011 fold_build1 (IMAGPART_EXPR
, type
,
7012 TREE_OPERAND (arg0
, 1)));
7017 } /* switch (code) */
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_binary (enum tree_code code, tree type, tree op0, tree op1)
{
  tree t1 = NULL_TREE;
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  /* WINS will be nonzero when the switch is done
     if all operands are constant.  */
  int wins = 1;

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 2);
7046 /* Strip any conversions that don't change the mode. This is
7047 safe for every expression, except for a comparison expression
7048 because its signedness is derived from its operands. So, in
7049 the latter case, only strip conversions that don't change the
7052 Note that this is done as an internal manipulation within the
7053 constant folder, in order to find the simplest representation
7054 of the arguments so that their form can be studied. In any
7055 cases, the appropriate type conversions should be put back in
7056 the tree that will get out of the constant folder. */
7057 if (kind
== tcc_comparison
)
7058 STRIP_SIGN_NOPS (arg0
);
7062 if (TREE_CODE (arg0
) == COMPLEX_CST
)
7063 subop
= TREE_REALPART (arg0
);
7067 if (TREE_CODE (subop
) != INTEGER_CST
7068 && TREE_CODE (subop
) != REAL_CST
)
7069 /* Note that TREE_CONSTANT isn't enough:
7070 static var addresses are constant but we can't
7071 do arithmetic on them. */
7079 /* Strip any conversions that don't change the mode. This is
7080 safe for every expression, except for a comparison expression
7081 because its signedness is derived from its operands. So, in
7082 the latter case, only strip conversions that don't change the
7085 Note that this is done as an internal manipulation within the
7086 constant folder, in order to find the simplest representation
7087 of the arguments so that their form can be studied. In any
7088 cases, the appropriate type conversions should be put back in
7089 the tree that will get out of the constant folder. */
7090 if (kind
== tcc_comparison
)
7091 STRIP_SIGN_NOPS (arg1
);
7095 if (TREE_CODE (arg1
) == COMPLEX_CST
)
7096 subop
= TREE_REALPART (arg1
);
7100 if (TREE_CODE (subop
) != INTEGER_CST
7101 && TREE_CODE (subop
) != REAL_CST
)
7102 /* Note that TREE_CONSTANT isn't enough:
7103 static var addresses are constant but we can't
7104 do arithmetic on them. */
7108 /* If this is a commutative operation, and ARG0 is a constant, move it
7109 to ARG1 to reduce the number of tests below. */
7110 if (commutative_tree_code (code
)
7111 && tree_swap_operands_p (arg0
, arg1
, true))
7112 return fold_build2 (code
, type
, op1
, op0
);
  /* Now WINS is set as described above,
     ARG0 is the first operand of EXPR,
     and ARG1 is the second operand (if it has more than one operand).

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     one of the operands is a comparison and the other is a comparison, a
     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
     code below would make the expression more complex.  Change it to a
     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */
7132 if ((code
== BIT_AND_EXPR
|| code
== BIT_IOR_EXPR
7133 || code
== EQ_EXPR
|| code
== NE_EXPR
)
7134 && ((truth_value_p (TREE_CODE (arg0
))
7135 && (truth_value_p (TREE_CODE (arg1
))
7136 || (TREE_CODE (arg1
) == BIT_AND_EXPR
7137 && integer_onep (TREE_OPERAND (arg1
, 1)))))
7138 || (truth_value_p (TREE_CODE (arg1
))
7139 && (truth_value_p (TREE_CODE (arg0
))
7140 || (TREE_CODE (arg0
) == BIT_AND_EXPR
7141 && integer_onep (TREE_OPERAND (arg0
, 1)))))))
7143 tem
= fold_build2 (code
== BIT_AND_EXPR
? TRUTH_AND_EXPR
7144 : code
== BIT_IOR_EXPR
? TRUTH_OR_EXPR
7147 fold_convert (boolean_type_node
, arg0
),
7148 fold_convert (boolean_type_node
, arg1
));
7150 if (code
== EQ_EXPR
)
7151 tem
= invert_truthvalue (tem
);
7153 return fold_convert (type
, tem
);
7156 if (TREE_CODE_CLASS (code
) == tcc_binary
7157 || TREE_CODE_CLASS (code
) == tcc_comparison
)
7159 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
7160 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7161 fold_build2 (code
, type
,
7162 TREE_OPERAND (arg0
, 1), op1
));
7163 if (TREE_CODE (arg1
) == COMPOUND_EXPR
7164 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
7165 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg1
, 0),
7166 fold_build2 (code
, type
,
7167 op0
, TREE_OPERAND (arg1
, 1)));
7169 if (TREE_CODE (arg0
) == COND_EXPR
|| COMPARISON_CLASS_P (arg0
))
7171 tem
= fold_binary_op_with_conditional_arg (code
, type
, op0
, op1
,
7173 /*cond_first_p=*/1);
7174 if (tem
!= NULL_TREE
)
7178 if (TREE_CODE (arg1
) == COND_EXPR
|| COMPARISON_CLASS_P (arg1
))
7180 tem
= fold_binary_op_with_conditional_arg (code
, type
, op0
, op1
,
7182 /*cond_first_p=*/0);
7183 if (tem
!= NULL_TREE
)
7191 /* A + (-B) -> A - B */
7192 if (TREE_CODE (arg1
) == NEGATE_EXPR
)
7193 return fold_build2 (MINUS_EXPR
, type
,
7194 fold_convert (type
, arg0
),
7195 fold_convert (type
, TREE_OPERAND (arg1
, 0)));
7196 /* (-A) + B -> B - A */
7197 if (TREE_CODE (arg0
) == NEGATE_EXPR
7198 && reorder_operands_p (TREE_OPERAND (arg0
, 0), arg1
))
7199 return fold_build2 (MINUS_EXPR
, type
,
7200 fold_convert (type
, arg1
),
7201 fold_convert (type
, TREE_OPERAND (arg0
, 0)));
7202 /* Convert ~A + 1 to -A. */
7203 if (INTEGRAL_TYPE_P (type
)
7204 && TREE_CODE (arg0
) == BIT_NOT_EXPR
7205 && integer_onep (arg1
))
7206 return fold_build1 (NEGATE_EXPR
, type
, TREE_OPERAND (arg0
, 0));
7208 if (! FLOAT_TYPE_P (type
))
7210 if (integer_zerop (arg1
))
7211 return non_lvalue (fold_convert (type
, arg0
));
7213 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
7214 with a constant, and the two constants have no bits in common,
7215 we should treat this as a BIT_IOR_EXPR since this may produce more
7217 if (TREE_CODE (arg0
) == BIT_AND_EXPR
7218 && TREE_CODE (arg1
) == BIT_AND_EXPR
7219 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
7220 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
7221 && integer_zerop (const_binop (BIT_AND_EXPR
,
7222 TREE_OPERAND (arg0
, 1),
7223 TREE_OPERAND (arg1
, 1), 0)))
7225 code
= BIT_IOR_EXPR
;
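
              /* Illustrative note (not part of the original sources):
                 e.g. (x & 0xf0) + (y & 0x0f) cannot carry between the two
                 terms, so it may be treated as (x & 0xf0) | (y & 0x0f).  */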
7229 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
7230 (plus (plus (mult) (mult)) (foo)) so that we can
7231 take advantage of the factoring cases below. */
7232 if (((TREE_CODE (arg0
) == PLUS_EXPR
7233 || TREE_CODE (arg0
) == MINUS_EXPR
)
7234 && TREE_CODE (arg1
) == MULT_EXPR
)
7235 || ((TREE_CODE (arg1
) == PLUS_EXPR
7236 || TREE_CODE (arg1
) == MINUS_EXPR
)
7237 && TREE_CODE (arg0
) == MULT_EXPR
))
7239 tree parg0
, parg1
, parg
, marg
;
7240 enum tree_code pcode
;
7242 if (TREE_CODE (arg1
) == MULT_EXPR
)
7243 parg
= arg0
, marg
= arg1
;
7245 parg
= arg1
, marg
= arg0
;
7246 pcode
= TREE_CODE (parg
);
7247 parg0
= TREE_OPERAND (parg
, 0);
7248 parg1
= TREE_OPERAND (parg
, 1);
7252 if (TREE_CODE (parg0
) == MULT_EXPR
7253 && TREE_CODE (parg1
) != MULT_EXPR
)
7254 return fold_build2 (pcode
, type
,
7255 fold_build2 (PLUS_EXPR
, type
,
7256 fold_convert (type
, parg0
),
7257 fold_convert (type
, marg
)),
7258 fold_convert (type
, parg1
));
7259 if (TREE_CODE (parg0
) != MULT_EXPR
7260 && TREE_CODE (parg1
) == MULT_EXPR
)
7261 return fold_build2 (PLUS_EXPR
, type
,
7262 fold_convert (type
, parg0
),
7263 fold_build2 (pcode
, type
,
7264 fold_convert (type
, marg
),
7269 if (TREE_CODE (arg0
) == MULT_EXPR
&& TREE_CODE (arg1
) == MULT_EXPR
)
7271 tree arg00
, arg01
, arg10
, arg11
;
7272 tree alt0
= NULL_TREE
, alt1
= NULL_TREE
, same
;
7274 /* (A * C) + (B * C) -> (A+B) * C.
7275 We are most concerned about the case where C is a constant,
7276 but other combinations show up during loop reduction. Since
7277 it is not difficult, try all four possibilities. */
7279 arg00
= TREE_OPERAND (arg0
, 0);
7280 arg01
= TREE_OPERAND (arg0
, 1);
7281 arg10
= TREE_OPERAND (arg1
, 0);
7282 arg11
= TREE_OPERAND (arg1
, 1);
7285 if (operand_equal_p (arg01
, arg11
, 0))
7286 same
= arg01
, alt0
= arg00
, alt1
= arg10
;
7287 else if (operand_equal_p (arg00
, arg10
, 0))
7288 same
= arg00
, alt0
= arg01
, alt1
= arg11
;
7289 else if (operand_equal_p (arg00
, arg11
, 0))
7290 same
= arg00
, alt0
= arg01
, alt1
= arg10
;
7291 else if (operand_equal_p (arg01
, arg10
, 0))
7292 same
= arg01
, alt0
= arg00
, alt1
= arg11
;
7294 /* No identical multiplicands; see if we can find a common
7295 power-of-two factor in non-power-of-two multiplies. This
7296 can help in multi-dimensional array access. */
7297 else if (TREE_CODE (arg01
) == INTEGER_CST
7298 && TREE_CODE (arg11
) == INTEGER_CST
7299 && TREE_INT_CST_HIGH (arg01
) == 0
7300 && TREE_INT_CST_HIGH (arg11
) == 0)
7302 HOST_WIDE_INT int01
, int11
, tmp
;
7303 int01
= TREE_INT_CST_LOW (arg01
);
7304 int11
= TREE_INT_CST_LOW (arg11
);
7306 /* Move min of absolute values to int11. */
7307 if ((int01
>= 0 ? int01
: -int01
)
7308 < (int11
>= 0 ? int11
: -int11
))
7310 tmp
= int01
, int01
= int11
, int11
= tmp
;
7311 alt0
= arg00
, arg00
= arg10
, arg10
= alt0
;
7312 alt0
= arg01
, arg01
= arg11
, arg11
= alt0
;
7315 if (exact_log2 (int11
) > 0 && int01
% int11
== 0)
7317 alt0
= fold_build2 (MULT_EXPR
, type
, arg00
,
7318 build_int_cst (NULL_TREE
,
7326 return fold_build2 (MULT_EXPR
, type
,
7327 fold_build2 (PLUS_EXPR
, type
,
7328 fold_convert (type
, alt0
),
7329 fold_convert (type
, alt1
)),
7330 fold_convert (type
, same
));
      /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
         of the array.  The loop optimizer sometimes produces this kind of
         expression.  */
7336 if (TREE_CODE (arg0
) == ADDR_EXPR
)
7338 tem
= try_move_mult_to_index (PLUS_EXPR
, arg0
, arg1
);
7340 return fold_convert (type
, tem
);
7342 else if (TREE_CODE (arg1
) == ADDR_EXPR
)
7344 tem
= try_move_mult_to_index (PLUS_EXPR
, arg1
, arg0
);
7346 return fold_convert (type
, tem
);
7351 /* See if ARG1 is zero and X + ARG1 reduces to X. */
7352 if (fold_real_zero_addition_p (TREE_TYPE (arg0
), arg1
, 0))
7353 return non_lvalue (fold_convert (type
, arg0
));
7355 /* Likewise if the operands are reversed. */
7356 if (fold_real_zero_addition_p (TREE_TYPE (arg1
), arg0
, 0))
7357 return non_lvalue (fold_convert (type
, arg1
));
7359 /* Convert X + -C into X - C. */
7360 if (TREE_CODE (arg1
) == REAL_CST
7361 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
)))
7363 tem
= fold_negate_const (arg1
, type
);
7364 if (!TREE_OVERFLOW (arg1
) || !flag_trapping_math
)
7365 return fold_build2 (MINUS_EXPR
, type
,
7366 fold_convert (type
, arg0
),
7367 fold_convert (type
, tem
));
7370 if (flag_unsafe_math_optimizations
7371 && (TREE_CODE (arg0
) == RDIV_EXPR
|| TREE_CODE (arg0
) == MULT_EXPR
)
7372 && (TREE_CODE (arg1
) == RDIV_EXPR
|| TREE_CODE (arg1
) == MULT_EXPR
)
7373 && (tem
= distribute_real_division (code
, type
, arg0
, arg1
)))
7376 /* Convert x+x into x*2.0. */
7377 if (operand_equal_p (arg0
, arg1
, 0)
7378 && SCALAR_FLOAT_TYPE_P (type
))
7379 return fold_build2 (MULT_EXPR
, type
, arg0
,
7380 build_real (type
, dconst2
));
7382 /* Convert x*c+x into x*(c+1). */
7383 if (flag_unsafe_math_optimizations
7384 && TREE_CODE (arg0
) == MULT_EXPR
7385 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
7386 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0
, 1))
7387 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
7391 c
= TREE_REAL_CST (TREE_OPERAND (arg0
, 1));
7392 real_arithmetic (&c
, PLUS_EXPR
, &c
, &dconst1
);
7393 return fold_build2 (MULT_EXPR
, type
, arg1
,
7394 build_real (type
, c
));
7397 /* Convert x+x*c into x*(c+1). */
7398 if (flag_unsafe_math_optimizations
7399 && TREE_CODE (arg1
) == MULT_EXPR
7400 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == REAL_CST
7401 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1
, 1))
7402 && operand_equal_p (TREE_OPERAND (arg1
, 0), arg0
, 0))
7406 c
= TREE_REAL_CST (TREE_OPERAND (arg1
, 1));
7407 real_arithmetic (&c
, PLUS_EXPR
, &c
, &dconst1
);
7408 return fold_build2 (MULT_EXPR
, type
, arg0
,
7409 build_real (type
, c
));
7412 /* Convert x*c1+x*c2 into x*(c1+c2). */
7413 if (flag_unsafe_math_optimizations
7414 && TREE_CODE (arg0
) == MULT_EXPR
7415 && TREE_CODE (arg1
) == MULT_EXPR
7416 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
7417 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0
, 1))
7418 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == REAL_CST
7419 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1
, 1))
7420 && operand_equal_p (TREE_OPERAND (arg0
, 0),
7421 TREE_OPERAND (arg1
, 0), 0))
7423 REAL_VALUE_TYPE c1
, c2
;
7425 c1
= TREE_REAL_CST (TREE_OPERAND (arg0
, 1));
7426 c2
= TREE_REAL_CST (TREE_OPERAND (arg1
, 1));
7427 real_arithmetic (&c1
, PLUS_EXPR
, &c1
, &c2
);
7428 return fold_build2 (MULT_EXPR
, type
,
7429 TREE_OPERAND (arg0
, 0),
7430 build_real (type
, c1
));
7432 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
7433 if (flag_unsafe_math_optimizations
7434 && TREE_CODE (arg1
) == PLUS_EXPR
7435 && TREE_CODE (arg0
) != MULT_EXPR
)
7437 tree tree10
= TREE_OPERAND (arg1
, 0);
7438 tree tree11
= TREE_OPERAND (arg1
, 1);
7439 if (TREE_CODE (tree11
) == MULT_EXPR
7440 && TREE_CODE (tree10
) == MULT_EXPR
)
7443 tree0
= fold_build2 (PLUS_EXPR
, type
, arg0
, tree10
);
7444 return fold_build2 (PLUS_EXPR
, type
, tree0
, tree11
);
7447 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
7448 if (flag_unsafe_math_optimizations
7449 && TREE_CODE (arg0
) == PLUS_EXPR
7450 && TREE_CODE (arg1
) != MULT_EXPR
)
7452 tree tree00
= TREE_OPERAND (arg0
, 0);
7453 tree tree01
= TREE_OPERAND (arg0
, 1);
7454 if (TREE_CODE (tree01
) == MULT_EXPR
7455 && TREE_CODE (tree00
) == MULT_EXPR
)
7458 tree0
= fold_build2 (PLUS_EXPR
, type
, tree01
, arg1
);
7459 return fold_build2 (PLUS_EXPR
, type
, tree00
, tree0
);
7465 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
7466 is a rotate of A by C1 bits. */
7467 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
7468 is a rotate of A by B bits. */
7470 enum tree_code code0
, code1
;
7471 code0
= TREE_CODE (arg0
);
7472 code1
= TREE_CODE (arg1
);
7473 if (((code0
== RSHIFT_EXPR
&& code1
== LSHIFT_EXPR
)
7474 || (code1
== RSHIFT_EXPR
&& code0
== LSHIFT_EXPR
))
7475 && operand_equal_p (TREE_OPERAND (arg0
, 0),
7476 TREE_OPERAND (arg1
, 0), 0)
7477 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0
, 0))))
7479 tree tree01
, tree11
;
7480 enum tree_code code01
, code11
;
7482 tree01
= TREE_OPERAND (arg0
, 1);
7483 tree11
= TREE_OPERAND (arg1
, 1);
7484 STRIP_NOPS (tree01
);
7485 STRIP_NOPS (tree11
);
7486 code01
= TREE_CODE (tree01
);
7487 code11
= TREE_CODE (tree11
);
7488 if (code01
== INTEGER_CST
7489 && code11
== INTEGER_CST
7490 && TREE_INT_CST_HIGH (tree01
) == 0
7491 && TREE_INT_CST_HIGH (tree11
) == 0
7492 && ((TREE_INT_CST_LOW (tree01
) + TREE_INT_CST_LOW (tree11
))
7493 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0
, 0)))))
7494 return build2 (LROTATE_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7495 code0
== LSHIFT_EXPR
? tree01
: tree11
);
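
            /* Illustrative note (not part of the original sources): for a
               32-bit unsigned X, (X << 3) + (X >> 29) is X rotated left by
               3, and (X << n) + (X >> (32 - n)) is a rotate by a variable
               amount.  */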
7496 else if (code11
== MINUS_EXPR
)
7498 tree tree110
, tree111
;
7499 tree110
= TREE_OPERAND (tree11
, 0);
7500 tree111
= TREE_OPERAND (tree11
, 1);
7501 STRIP_NOPS (tree110
);
7502 STRIP_NOPS (tree111
);
7503 if (TREE_CODE (tree110
) == INTEGER_CST
7504 && 0 == compare_tree_int (tree110
,
7506 (TREE_TYPE (TREE_OPERAND
7508 && operand_equal_p (tree01
, tree111
, 0))
7509 return build2 ((code0
== LSHIFT_EXPR
7512 type
, TREE_OPERAND (arg0
, 0), tree01
);
7514 else if (code01
== MINUS_EXPR
)
7516 tree tree010
, tree011
;
7517 tree010
= TREE_OPERAND (tree01
, 0);
7518 tree011
= TREE_OPERAND (tree01
, 1);
7519 STRIP_NOPS (tree010
);
7520 STRIP_NOPS (tree011
);
7521 if (TREE_CODE (tree010
) == INTEGER_CST
7522 && 0 == compare_tree_int (tree010
,
7524 (TREE_TYPE (TREE_OPERAND
7526 && operand_equal_p (tree11
, tree011
, 0))
7527 return build2 ((code0
!= LSHIFT_EXPR
7530 type
, TREE_OPERAND (arg0
, 0), tree11
);
7536 /* In most languages, can't associate operations on floats through
7537 parentheses. Rather than remember where the parentheses were, we
7538 don't associate floats at all, unless the user has specified
7539 -funsafe-math-optimizations. */
7542 && (! FLOAT_TYPE_P (type
) || flag_unsafe_math_optimizations
))
7544 tree var0
, con0
, lit0
, minus_lit0
;
7545 tree var1
, con1
, lit1
, minus_lit1
;
7547 /* Split both trees into variables, constants, and literals. Then
7548 associate each group together, the constants with literals,
7549 then the result with variables. This increases the chances of
7550 literals being recombined later and of generating relocatable
7551 expressions for the sum of a constant and literal. */
7552 var0
= split_tree (arg0
, code
, &con0
, &lit0
, &minus_lit0
, 0);
7553 var1
= split_tree (arg1
, code
, &con1
, &lit1
, &minus_lit1
,
7554 code
== MINUS_EXPR
);
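
          /* Illustrative note (not part of the original sources): this
             regrouping is what lets e.g. (x + 1) + 2 fold to x + 3: the
             literal parts are combined with each other first and only then
             recombined with the variable parts.  */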
7556 /* Only do something if we found more than two objects. Otherwise,
7557 nothing has changed and we risk infinite recursion. */
7558 if (2 < ((var0
!= 0) + (var1
!= 0)
7559 + (con0
!= 0) + (con1
!= 0)
7560 + (lit0
!= 0) + (lit1
!= 0)
7561 + (minus_lit0
!= 0) + (minus_lit1
!= 0)))
7563 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
7564 if (code
== MINUS_EXPR
)
7567 var0
= associate_trees (var0
, var1
, code
, type
);
7568 con0
= associate_trees (con0
, con1
, code
, type
);
7569 lit0
= associate_trees (lit0
, lit1
, code
, type
);
7570 minus_lit0
= associate_trees (minus_lit0
, minus_lit1
, code
, type
);
7572 /* Preserve the MINUS_EXPR if the negative part of the literal is
7573 greater than the positive part. Otherwise, the multiplicative
7574 folding code (i.e extract_muldiv) may be fooled in case
7575 unsigned constants are subtracted, like in the following
7576 example: ((X*2 + 4) - 8U)/2. */
7577 if (minus_lit0
&& lit0
)
7579 if (TREE_CODE (lit0
) == INTEGER_CST
7580 && TREE_CODE (minus_lit0
) == INTEGER_CST
7581 && tree_int_cst_lt (lit0
, minus_lit0
))
7583 minus_lit0
= associate_trees (minus_lit0
, lit0
,
7589 lit0
= associate_trees (lit0
, minus_lit0
,
7597 return fold_convert (type
,
7598 associate_trees (var0
, minus_lit0
,
7602 con0
= associate_trees (con0
, minus_lit0
,
7604 return fold_convert (type
,
7605 associate_trees (var0
, con0
,
7610 con0
= associate_trees (con0
, lit0
, code
, type
);
7611 return fold_convert (type
, associate_trees (var0
, con0
,
7618 t1
= const_binop (code
, arg0
, arg1
, 0);
7619 if (t1
!= NULL_TREE
)
7621 /* The return value should always have
7622 the same type as the original expression. */
7623 if (TREE_TYPE (t1
) != type
)
7624 t1
= fold_convert (type
, t1
);
7631 /* A - (-B) -> A + B */
7632 if (TREE_CODE (arg1
) == NEGATE_EXPR
)
7633 return fold_build2 (PLUS_EXPR
, type
, arg0
, TREE_OPERAND (arg1
, 0));
7634 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
7635 if (TREE_CODE (arg0
) == NEGATE_EXPR
7636 && (FLOAT_TYPE_P (type
)
7637 || (INTEGRAL_TYPE_P (type
) && flag_wrapv
&& !flag_trapv
))
7638 && negate_expr_p (arg1
)
7639 && reorder_operands_p (arg0
, arg1
))
7640 return fold_build2 (MINUS_EXPR
, type
, negate_expr (arg1
),
7641 TREE_OPERAND (arg0
, 0));
7642 /* Convert -A - 1 to ~A. */
7643 if (INTEGRAL_TYPE_P (type
)
7644 && TREE_CODE (arg0
) == NEGATE_EXPR
7645 && integer_onep (arg1
))
7646 return fold_build1 (BIT_NOT_EXPR
, type
, TREE_OPERAND (arg0
, 0));
7648 /* Convert -1 - A to ~A. */
7649 if (INTEGRAL_TYPE_P (type
)
7650 && integer_all_onesp (arg0
))
7651 return fold_build1 (BIT_NOT_EXPR
, type
, arg1
);
7653 if (! FLOAT_TYPE_P (type
))
7655 if (! wins
&& integer_zerop (arg0
))
7656 return negate_expr (fold_convert (type
, arg1
));
7657 if (integer_zerop (arg1
))
7658 return non_lvalue (fold_convert (type
, arg0
));
7660 /* Fold A - (A & B) into ~B & A. */
7661 if (!TREE_SIDE_EFFECTS (arg0
)
7662 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
7664 if (operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0))
7665 return fold_build2 (BIT_AND_EXPR
, type
,
7666 fold_build1 (BIT_NOT_EXPR
, type
,
7667 TREE_OPERAND (arg1
, 0)),
7669 if (operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
7670 return fold_build2 (BIT_AND_EXPR
, type
,
7671 fold_build1 (BIT_NOT_EXPR
, type
,
7672 TREE_OPERAND (arg1
, 1)),
7676 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
7677 any power of 2 minus 1. */
7678 if (TREE_CODE (arg0
) == BIT_AND_EXPR
7679 && TREE_CODE (arg1
) == BIT_AND_EXPR
7680 && operand_equal_p (TREE_OPERAND (arg0
, 0),
7681 TREE_OPERAND (arg1
, 0), 0))
7683 tree mask0
= TREE_OPERAND (arg0
, 1);
7684 tree mask1
= TREE_OPERAND (arg1
, 1);
7685 tree tem
= fold_build1 (BIT_NOT_EXPR
, type
, mask0
);
7687 if (operand_equal_p (tem
, mask1
, 0))
7689 tem
= fold_build2 (BIT_XOR_EXPR
, type
,
7690 TREE_OPERAND (arg0
, 0), mask1
);
7691 return fold_build2 (MINUS_EXPR
, type
, tem
, mask1
);
7696 /* See if ARG1 is zero and X - ARG1 reduces to X. */
7697 else if (fold_real_zero_addition_p (TREE_TYPE (arg0
), arg1
, 1))
7698 return non_lvalue (fold_convert (type
, arg0
));
7700 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
7701 ARG0 is zero and X + ARG0 reduces to X, since that would mean
7702 (-ARG1 + ARG0) reduces to -ARG1. */
7703 else if (!wins
&& fold_real_zero_addition_p (TREE_TYPE (arg1
), arg0
, 0))
7704 return negate_expr (fold_convert (type
, arg1
));
7706 /* Fold &x - &x. This can happen from &x.foo - &x.
7707 This is unsafe for certain floats even in non-IEEE formats.
7708 In IEEE, it is unsafe because it does wrong for NaNs.
7709 Also note that operand_equal_p is always false if an operand
7712 if ((! FLOAT_TYPE_P (type
) || flag_unsafe_math_optimizations
)
7713 && operand_equal_p (arg0
, arg1
, 0))
7714 return fold_convert (type
, integer_zero_node
);
7716 /* A - B -> A + (-B) if B is easily negatable. */
7717 if (!wins
&& negate_expr_p (arg1
)
7718 && ((FLOAT_TYPE_P (type
)
7719 /* Avoid this transformation if B is a positive REAL_CST. */
7720 && (TREE_CODE (arg1
) != REAL_CST
7721 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
))))
7722 || (INTEGRAL_TYPE_P (type
) && flag_wrapv
&& !flag_trapv
)))
7723 return fold_build2 (PLUS_EXPR
, type
,
7724 fold_convert (type
, arg0
),
7725 fold_convert (type
, negate_expr (arg1
)));
7727 /* Try folding difference of addresses. */
7731 if ((TREE_CODE (arg0
) == ADDR_EXPR
7732 || TREE_CODE (arg1
) == ADDR_EXPR
)
7733 && ptr_difference_const (arg0
, arg1
, &diff
))
7734 return build_int_cst_type (type
, diff
);
7737 /* Fold &a[i] - &a[j] to i-j. */
7738 if (TREE_CODE (arg0
) == ADDR_EXPR
7739 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == ARRAY_REF
7740 && TREE_CODE (arg1
) == ADDR_EXPR
7741 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == ARRAY_REF
)
7743 tree aref0
= TREE_OPERAND (arg0
, 0);
7744 tree aref1
= TREE_OPERAND (arg1
, 0);
7745 if (operand_equal_p (TREE_OPERAND (aref0
, 0),
7746 TREE_OPERAND (aref1
, 0), 0))
7748 tree op0
= fold_convert (type
, TREE_OPERAND (aref0
, 1));
7749 tree op1
= fold_convert (type
, TREE_OPERAND (aref1
, 1));
7750 tree esz
= array_ref_element_size (aref0
);
7751 tree diff
= build2 (MINUS_EXPR
, type
, op0
, op1
);
7752 return fold_build2 (MULT_EXPR
, type
, diff
,
7753 fold_convert (type
, esz
));
      /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
         of the array.  The loop optimizer sometimes produces this kind of
         expression.  */
7761 if (TREE_CODE (arg0
) == ADDR_EXPR
)
7763 tem
= try_move_mult_to_index (MINUS_EXPR
, arg0
, arg1
);
7765 return fold_convert (type
, tem
);
7768 if (flag_unsafe_math_optimizations
7769 && (TREE_CODE (arg0
) == RDIV_EXPR
|| TREE_CODE (arg0
) == MULT_EXPR
)
7770 && (TREE_CODE (arg1
) == RDIV_EXPR
|| TREE_CODE (arg1
) == MULT_EXPR
)
7771 && (tem
= distribute_real_division (code
, type
, arg0
, arg1
)))
7774 if (TREE_CODE (arg0
) == MULT_EXPR
7775 && TREE_CODE (arg1
) == MULT_EXPR
7776 && (!FLOAT_TYPE_P (type
) || flag_unsafe_math_optimizations
))
7778 /* (A * C) - (B * C) -> (A-B) * C. */
7779 if (operand_equal_p (TREE_OPERAND (arg0
, 1),
7780 TREE_OPERAND (arg1
, 1), 0))
7781 return fold_build2 (MULT_EXPR
, type
,
7782 fold_build2 (MINUS_EXPR
, type
,
7783 TREE_OPERAND (arg0
, 0),
7784 TREE_OPERAND (arg1
, 0)),
7785 TREE_OPERAND (arg0
, 1));
7786 /* (A * C1) - (A * C2) -> A * (C1-C2). */
7787 if (operand_equal_p (TREE_OPERAND (arg0
, 0),
7788 TREE_OPERAND (arg1
, 0), 0))
7789 return fold_build2 (MULT_EXPR
, type
,
7790 TREE_OPERAND (arg0
, 0),
7791 fold_build2 (MINUS_EXPR
, type
,
7792 TREE_OPERAND (arg0
, 1),
7793 TREE_OPERAND (arg1
, 1)));
7799 /* (-A) * (-B) -> A * B */
7800 if (TREE_CODE (arg0
) == NEGATE_EXPR
&& negate_expr_p (arg1
))
7801 return fold_build2 (MULT_EXPR
, type
,
7802 TREE_OPERAND (arg0
, 0),
7803 negate_expr (arg1
));
7804 if (TREE_CODE (arg1
) == NEGATE_EXPR
&& negate_expr_p (arg0
))
7805 return fold_build2 (MULT_EXPR
, type
,
7807 TREE_OPERAND (arg1
, 0));
7809 if (! FLOAT_TYPE_P (type
))
7811 if (integer_zerop (arg1
))
7812 return omit_one_operand (type
, arg1
, arg0
);
7813 if (integer_onep (arg1
))
7814 return non_lvalue (fold_convert (type
, arg0
));
7815 /* Transform x * -1 into -x. */
7816 if (integer_all_onesp (arg1
))
7817 return fold_convert (type
, negate_expr (arg0
));
7819 /* (a * (1 << b)) is (a << b) */
7820 if (TREE_CODE (arg1
) == LSHIFT_EXPR
7821 && integer_onep (TREE_OPERAND (arg1
, 0)))
7822 return fold_build2 (LSHIFT_EXPR
, type
, arg0
,
7823 TREE_OPERAND (arg1
, 1));
7824 if (TREE_CODE (arg0
) == LSHIFT_EXPR
7825 && integer_onep (TREE_OPERAND (arg0
, 0)))
7826 return fold_build2 (LSHIFT_EXPR
, type
, arg1
,
7827 TREE_OPERAND (arg0
, 1));
7829 if (TREE_CODE (arg1
) == INTEGER_CST
7830 && 0 != (tem
= extract_muldiv (op0
,
7831 fold_convert (type
, arg1
),
7833 return fold_convert (type
, tem
);
7838 /* Maybe fold x * 0 to 0. The expressions aren't the same
7839 when x is NaN, since x * 0 is also NaN. Nor are they the
7840 same in modes with signed zeros, since multiplying a
7841 negative value by 0 gives -0, not +0. */
7842 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
)))
7843 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
7844 && real_zerop (arg1
))
7845 return omit_one_operand (type
, arg1
, arg0
);
7846 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7847 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
7848 && real_onep (arg1
))
7849 return non_lvalue (fold_convert (type
, arg0
));
7851 /* Transform x * -1.0 into -x. */
7852 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
7853 && real_minus_onep (arg1
))
7854 return fold_convert (type
, negate_expr (arg0
));
7856 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7857 if (flag_unsafe_math_optimizations
7858 && TREE_CODE (arg0
) == RDIV_EXPR
7859 && TREE_CODE (arg1
) == REAL_CST
7860 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == REAL_CST
)
7862 tree tem
= const_binop (MULT_EXPR
, TREE_OPERAND (arg0
, 0),
7865 return fold_build2 (RDIV_EXPR
, type
, tem
,
7866 TREE_OPERAND (arg0
, 1));
7869 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
7870 if (operand_equal_p (arg0
, arg1
, 0))
7872 tree tem
= fold_strip_sign_ops (arg0
);
7873 if (tem
!= NULL_TREE
)
7875 tem
= fold_convert (type
, tem
);
7876 return fold_build2 (MULT_EXPR
, type
, tem
, tem
);
7880 if (flag_unsafe_math_optimizations
)
7882 enum built_in_function fcode0
= builtin_mathfn_code (arg0
);
7883 enum built_in_function fcode1
= builtin_mathfn_code (arg1
);
7885 /* Optimizations of root(...)*root(...). */
7886 if (fcode0
== fcode1
&& BUILTIN_ROOT_P (fcode0
))
7888 tree rootfn
, arg
, arglist
;
7889 tree arg00
= TREE_VALUE (TREE_OPERAND (arg0
, 1));
7890 tree arg10
= TREE_VALUE (TREE_OPERAND (arg1
, 1));
7892 /* Optimize sqrt(x)*sqrt(x) as x. */
7893 if (BUILTIN_SQRT_P (fcode0
)
7894 && operand_equal_p (arg00
, arg10
, 0)
7895 && ! HONOR_SNANS (TYPE_MODE (type
)))
7898 /* Optimize root(x)*root(y) as root(x*y). */
7899 rootfn
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
7900 arg
= fold_build2 (MULT_EXPR
, type
, arg00
, arg10
);
7901 arglist
= build_tree_list (NULL_TREE
, arg
);
7902 return build_function_call_expr (rootfn
, arglist
);
7905 /* Optimize expN(x)*expN(y) as expN(x+y). */
7906 if (fcode0
== fcode1
&& BUILTIN_EXPONENT_P (fcode0
))
7908 tree expfn
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
7909 tree arg
= fold_build2 (PLUS_EXPR
, type
,
7910 TREE_VALUE (TREE_OPERAND (arg0
, 1)),
7911 TREE_VALUE (TREE_OPERAND (arg1
, 1)));
7912 tree arglist
= build_tree_list (NULL_TREE
, arg
);
7913 return build_function_call_expr (expfn
, arglist
);
7916 /* Optimizations of pow(...)*pow(...). */
7917 if ((fcode0
== BUILT_IN_POW
&& fcode1
== BUILT_IN_POW
)
7918 || (fcode0
== BUILT_IN_POWF
&& fcode1
== BUILT_IN_POWF
)
7919 || (fcode0
== BUILT_IN_POWL
&& fcode1
== BUILT_IN_POWL
))
7921 tree arg00
= TREE_VALUE (TREE_OPERAND (arg0
, 1));
7922 tree arg01
= TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0
,
7924 tree arg10
= TREE_VALUE (TREE_OPERAND (arg1
, 1));
7925 tree arg11
= TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1
,
7928 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7929 if (operand_equal_p (arg01
, arg11
, 0))
7931 tree powfn
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
7932 tree arg
= fold_build2 (MULT_EXPR
, type
, arg00
, arg10
);
7933 tree arglist
= tree_cons (NULL_TREE
, arg
,
7934 build_tree_list (NULL_TREE
,
7936 return build_function_call_expr (powfn
, arglist
);
7939 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7940 if (operand_equal_p (arg00
, arg10
, 0))
7942 tree powfn
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
7943 tree arg
= fold_build2 (PLUS_EXPR
, type
, arg01
, arg11
);
7944 tree arglist
= tree_cons (NULL_TREE
, arg00
,
7945 build_tree_list (NULL_TREE
,
7947 return build_function_call_expr (powfn
, arglist
);
7951 /* Optimize tan(x)*cos(x) as sin(x). */
7952 if (((fcode0
== BUILT_IN_TAN
&& fcode1
== BUILT_IN_COS
)
7953 || (fcode0
== BUILT_IN_TANF
&& fcode1
== BUILT_IN_COSF
)
7954 || (fcode0
== BUILT_IN_TANL
&& fcode1
== BUILT_IN_COSL
)
7955 || (fcode0
== BUILT_IN_COS
&& fcode1
== BUILT_IN_TAN
)
7956 || (fcode0
== BUILT_IN_COSF
&& fcode1
== BUILT_IN_TANF
)
7957 || (fcode0
== BUILT_IN_COSL
&& fcode1
== BUILT_IN_TANL
))
7958 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0
, 1)),
7959 TREE_VALUE (TREE_OPERAND (arg1
, 1)), 0))
7961 tree sinfn
= mathfn_built_in (type
, BUILT_IN_SIN
);
7963 if (sinfn
!= NULL_TREE
)
7964 return build_function_call_expr (sinfn
,
7965 TREE_OPERAND (arg0
, 1));
7968 /* Optimize x*pow(x,c) as pow(x,c+1). */
7969 if (fcode1
== BUILT_IN_POW
7970 || fcode1
== BUILT_IN_POWF
7971 || fcode1
== BUILT_IN_POWL
)
7973 tree arg10
= TREE_VALUE (TREE_OPERAND (arg1
, 1));
7974 tree arg11
= TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1
,
7976 if (TREE_CODE (arg11
) == REAL_CST
7977 && ! TREE_CONSTANT_OVERFLOW (arg11
)
7978 && operand_equal_p (arg0
, arg10
, 0))
7980 tree powfn
= TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0);
7984 c
= TREE_REAL_CST (arg11
);
7985 real_arithmetic (&c
, PLUS_EXPR
, &c
, &dconst1
);
7986 arg
= build_real (type
, c
);
7987 arglist
= build_tree_list (NULL_TREE
, arg
);
7988 arglist
= tree_cons (NULL_TREE
, arg0
, arglist
);
7989 return build_function_call_expr (powfn
, arglist
);
7993 /* Optimize pow(x,c)*x as pow(x,c+1). */
7994 if (fcode0
== BUILT_IN_POW
7995 || fcode0
== BUILT_IN_POWF
7996 || fcode0
== BUILT_IN_POWL
)
7998 tree arg00
= TREE_VALUE (TREE_OPERAND (arg0
, 1));
7999 tree arg01
= TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0
,
8001 if (TREE_CODE (arg01
) == REAL_CST
8002 && ! TREE_CONSTANT_OVERFLOW (arg01
)
8003 && operand_equal_p (arg1
, arg00
, 0))
8005 tree powfn
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
8009 c
= TREE_REAL_CST (arg01
);
8010 real_arithmetic (&c
, PLUS_EXPR
, &c
, &dconst1
);
8011 arg
= build_real (type
, c
);
8012 arglist
= build_tree_list (NULL_TREE
, arg
);
8013 arglist
= tree_cons (NULL_TREE
, arg1
, arglist
);
8014 return build_function_call_expr (powfn
, arglist
);
8018 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
8020 && operand_equal_p (arg0
, arg1
, 0))
8022 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
8026 tree arg
= build_real (type
, dconst2
);
8027 tree arglist
= build_tree_list (NULL_TREE
, arg
);
8028 arglist
= tree_cons (NULL_TREE
, arg0
, arglist
);
8029 return build_function_call_expr (powfn
, arglist
);
8038 if (integer_all_onesp (arg1
))
8039 return omit_one_operand (type
, arg1
, arg0
);
8040 if (integer_zerop (arg1
))
8041 return non_lvalue (fold_convert (type
, arg0
));
8042 if (operand_equal_p (arg0
, arg1
, 0))
8043 return non_lvalue (fold_convert (type
, arg0
));
8046 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
8047 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
8049 t1
= build_int_cst (type
, -1);
8050 t1
= force_fit_type (t1
, 0, false, false);
8051 return omit_one_operand (type
, t1
, arg1
);
8055 if (TREE_CODE (arg1
) == BIT_NOT_EXPR
8056 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
8058 t1
= build_int_cst (type
, -1);
8059 t1
= force_fit_type (t1
, 0, false, false);
8060 return omit_one_operand (type
, t1
, arg0
);
8063 t1
= distribute_bit_expr (code
, type
, arg0
, arg1
);
8064 if (t1
!= NULL_TREE
)
8067 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
8069 This results in more efficient code for machines without a NAND
8070 instruction. Combine will canonicalize to the first form
8071 which will allow use of NAND instructions provided by the
8072 backend if they exist. */
8073 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
8074 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
8076 return fold_build1 (BIT_NOT_EXPR
, type
,
8077 build2 (BIT_AND_EXPR
, type
,
8078 TREE_OPERAND (arg0
, 0),
8079 TREE_OPERAND (arg1
, 0)));
8082 /* See if this can be simplified into a rotate first. If that
8083 is unsuccessful continue in the association code. */
8087 if (integer_zerop (arg1
))
8088 return non_lvalue (fold_convert (type
, arg0
));
8089 if (integer_all_onesp (arg1
))
8090 return fold_build1 (BIT_NOT_EXPR
, type
, arg0
);
8091 if (operand_equal_p (arg0
, arg1
, 0))
8092 return omit_one_operand (type
, integer_zero_node
, arg0
);
8095 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
8096 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
8098 t1
= build_int_cst (type
, -1);
8099 t1
= force_fit_type (t1
, 0, false, false);
8100 return omit_one_operand (type
, t1
, arg1
);
8104 if (TREE_CODE (arg1
) == BIT_NOT_EXPR
8105 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
8107 t1
= build_int_cst (type
, -1);
8108 t1
= force_fit_type (t1
, 0, false, false);
8109 return omit_one_operand (type
, t1
, arg0
);
8112 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
8113 with a constant, and the two constants have no bits in common,
8114 we should treat this as a BIT_IOR_EXPR since this may produce more
8116 if (TREE_CODE (arg0
) == BIT_AND_EXPR
8117 && TREE_CODE (arg1
) == BIT_AND_EXPR
8118 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8119 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
8120 && integer_zerop (const_binop (BIT_AND_EXPR
,
8121 TREE_OPERAND (arg0
, 1),
8122 TREE_OPERAND (arg1
, 1), 0)))
8124 code
= BIT_IOR_EXPR
;
8128 /* (X | Y) ^ X -> Y & ~ X*/
8129 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
8130 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
8132 tree t2
= TREE_OPERAND (arg0
, 1);
8133 t1
= fold_build1 (BIT_NOT_EXPR
, TREE_TYPE (arg1
),
8135 t1
= fold_build2 (BIT_AND_EXPR
, type
, fold_convert (type
, t2
),
8136 fold_convert (type
, t1
));
8140 /* (Y | X) ^ X -> Y & ~ X*/
8141 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
8142 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
8144 tree t2
= TREE_OPERAND (arg0
, 0);
8145 t1
= fold_build1 (BIT_NOT_EXPR
, TREE_TYPE (arg1
),
8147 t1
= fold_build2 (BIT_AND_EXPR
, type
, fold_convert (type
, t2
),
8148 fold_convert (type
, t1
));
8152 /* X ^ (X | Y) -> Y & ~ X*/
8153 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
8154 && operand_equal_p (TREE_OPERAND (arg1
, 0), arg0
, 0))
8156 tree t2
= TREE_OPERAND (arg1
, 1);
8157 t1
= fold_build1 (BIT_NOT_EXPR
, TREE_TYPE (arg0
),
8159 t1
= fold_build2 (BIT_AND_EXPR
, type
, fold_convert (type
, t2
),
8160 fold_convert (type
, t1
));
8164 /* X ^ (Y | X) -> Y & ~ X*/
8165 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
8166 && operand_equal_p (TREE_OPERAND (arg1
, 1), arg0
, 0))
8168 tree t2
= TREE_OPERAND (arg1
, 0);
8169 t1
= fold_build1 (BIT_NOT_EXPR
, TREE_TYPE (arg0
),
8171 t1
= fold_build2 (BIT_AND_EXPR
, type
, fold_convert (type
, t2
),
8172 fold_convert (type
, t1
));
8176 /* Convert ~X ^ ~Y to X ^ Y. */
8177 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
8178 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
8179 return fold_build2 (code
, type
,
8180 fold_convert (type
, TREE_OPERAND (arg0
, 0)),
8181 fold_convert (type
, TREE_OPERAND (arg1
, 0)));
8183 /* See if this can be simplified into a rotate first. If that
8184 is unsuccessful continue in the association code. */
8188 if (integer_all_onesp (arg1
))
8189 return non_lvalue (fold_convert (type
, arg0
));
8190 if (integer_zerop (arg1
))
8191 return omit_one_operand (type
, arg1
, arg0
);
8192 if (operand_equal_p (arg0
, arg1
, 0))
8193 return non_lvalue (fold_convert (type
, arg0
));
8195 /* ~X & X is always zero. */
8196 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
8197 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
8198 return omit_one_operand (type
, integer_zero_node
, arg1
);
8200 /* X & ~X is always zero. */
8201 if (TREE_CODE (arg1
) == BIT_NOT_EXPR
8202 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
8203 return omit_one_operand (type
, integer_zero_node
, arg0
);
8205 t1
= distribute_bit_expr (code
, type
, arg0
, arg1
);
8206 if (t1
!= NULL_TREE
)
8208 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
8209 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) == NOP_EXPR
8210 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0
, 0))))
8213 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0
, 0)));
8215 if (prec
< BITS_PER_WORD
&& prec
< HOST_BITS_PER_WIDE_INT
8216 && (~TREE_INT_CST_LOW (arg1
)
8217 & (((HOST_WIDE_INT
) 1 << prec
) - 1)) == 0)
8218 return fold_convert (type
, TREE_OPERAND (arg0
, 0));
8221 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
8223 This results in more efficient code for machines without a NOR
8224 instruction. Combine will canonicalize to the first form
8225 which will allow use of NOR instructions provided by the
8226 backend if they exist. */
8227 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
8228 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
8230 return fold_build1 (BIT_NOT_EXPR
, type
,
8231 build2 (BIT_IOR_EXPR
, type
,
8232 TREE_OPERAND (arg0
, 0),
8233 TREE_OPERAND (arg1
, 0)));
8239 /* Don't touch a floating-point divide by zero unless the mode
8240 of the constant can represent infinity. */
8241 if (TREE_CODE (arg1
) == REAL_CST
8242 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1
)))
8243 && real_zerop (arg1
))
8246 /* (-A) / (-B) -> A / B */
8247 if (TREE_CODE (arg0
) == NEGATE_EXPR
&& negate_expr_p (arg1
))
8248 return fold_build2 (RDIV_EXPR
, type
,
8249 TREE_OPERAND (arg0
, 0),
8250 negate_expr (arg1
));
8251 if (TREE_CODE (arg1
) == NEGATE_EXPR
&& negate_expr_p (arg0
))
8252 return fold_build2 (RDIV_EXPR
, type
,
8254 TREE_OPERAND (arg1
, 0));
8256 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
8257 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
8258 && real_onep (arg1
))
8259 return non_lvalue (fold_convert (type
, arg0
));
8261 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
8262 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
8263 && real_minus_onep (arg1
))
8264 return non_lvalue (fold_convert (type
, negate_expr (arg0
)));
8266 /* If ARG1 is a constant, we can convert this to a multiply by the
8267 reciprocal. This does not have the same rounding properties,
8268 so only do this if -funsafe-math-optimizations. We can actually
8269 always safely do it if ARG1 is a power of two, but it's hard to
8270 tell if it is or not in a portable manner. */
8271 if (TREE_CODE (arg1
) == REAL_CST
)
8273 if (flag_unsafe_math_optimizations
8274 && 0 != (tem
= const_binop (code
, build_real (type
, dconst1
),
8276 return fold_build2 (MULT_EXPR
, type
, arg0
, tem
);
8277 /* Find the reciprocal if optimizing and the result is exact. */
8281 r
= TREE_REAL_CST (arg1
);
8282 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0
)), &r
))
8284 tem
= build_real (type
, r
);
8285 return fold_build2 (MULT_EXPR
, type
,
8286 fold_convert (type
, arg0
), tem
);
8290 /* Convert A/B/C to A/(B*C). */
8291 if (flag_unsafe_math_optimizations
8292 && TREE_CODE (arg0
) == RDIV_EXPR
)
8293 return fold_build2 (RDIV_EXPR
, type
, TREE_OPERAND (arg0
, 0),
8294 fold_build2 (MULT_EXPR
, type
,
8295 TREE_OPERAND (arg0
, 1), arg1
));
8297 /* Convert A/(B/C) to (A/B)*C. */
8298 if (flag_unsafe_math_optimizations
8299 && TREE_CODE (arg1
) == RDIV_EXPR
)
8300 return fold_build2 (MULT_EXPR
, type
,
8301 fold_build2 (RDIV_EXPR
, type
, arg0
,
8302 TREE_OPERAND (arg1
, 0)),
8303 TREE_OPERAND (arg1
, 1));
8305 /* Convert C1/(X*C2) into (C1/C2)/X. */
8306 if (flag_unsafe_math_optimizations
8307 && TREE_CODE (arg1
) == MULT_EXPR
8308 && TREE_CODE (arg0
) == REAL_CST
8309 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == REAL_CST
)
8311 tree tem
= const_binop (RDIV_EXPR
, arg0
,
8312 TREE_OPERAND (arg1
, 1), 0);
8314 return fold_build2 (RDIV_EXPR
, type
, tem
,
8315 TREE_OPERAND (arg1
, 0));
8318 if (flag_unsafe_math_optimizations
)
8320 enum built_in_function fcode
= builtin_mathfn_code (arg1
);
8321 /* Optimize x/expN(y) into x*expN(-y). */
8322 if (BUILTIN_EXPONENT_P (fcode
))
8324 tree expfn
= TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0);
8325 tree arg
= negate_expr (TREE_VALUE (TREE_OPERAND (arg1
, 1)));
8326 tree arglist
= build_tree_list (NULL_TREE
,
8327 fold_convert (type
, arg
));
8328 arg1
= build_function_call_expr (expfn
, arglist
);
8329 return fold_build2 (MULT_EXPR
, type
, arg0
, arg1
);
8332 /* Optimize x/pow(y,z) into x*pow(y,-z). */
8333 if (fcode
== BUILT_IN_POW
8334 || fcode
== BUILT_IN_POWF
8335 || fcode
== BUILT_IN_POWL
)
8337 tree powfn
= TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0);
8338 tree arg10
= TREE_VALUE (TREE_OPERAND (arg1
, 1));
8339 tree arg11
= TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1
, 1)));
8340 tree neg11
= fold_convert (type
, negate_expr (arg11
));
8341 tree arglist
= tree_cons(NULL_TREE
, arg10
,
8342 build_tree_list (NULL_TREE
, neg11
));
8343 arg1
= build_function_call_expr (powfn
, arglist
);
8344 return fold_build2 (MULT_EXPR
, type
, arg0
, arg1
);
8348 if (flag_unsafe_math_optimizations
)
8350 enum built_in_function fcode0
= builtin_mathfn_code (arg0
);
8351 enum built_in_function fcode1
= builtin_mathfn_code (arg1
);
8353 /* Optimize sin(x)/cos(x) as tan(x). */
8354 if (((fcode0
== BUILT_IN_SIN
&& fcode1
== BUILT_IN_COS
)
8355 || (fcode0
== BUILT_IN_SINF
&& fcode1
== BUILT_IN_COSF
)
8356 || (fcode0
== BUILT_IN_SINL
&& fcode1
== BUILT_IN_COSL
))
8357 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0
, 1)),
8358 TREE_VALUE (TREE_OPERAND (arg1
, 1)), 0))
8360 tree tanfn
= mathfn_built_in (type
, BUILT_IN_TAN
);
8362 if (tanfn
!= NULL_TREE
)
8363 return build_function_call_expr (tanfn
,
8364 TREE_OPERAND (arg0
, 1));
8367 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
8368 if (((fcode0
== BUILT_IN_COS
&& fcode1
== BUILT_IN_SIN
)
8369 || (fcode0
== BUILT_IN_COSF
&& fcode1
== BUILT_IN_SINF
)
8370 || (fcode0
== BUILT_IN_COSL
&& fcode1
== BUILT_IN_SINL
))
8371 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0
, 1)),
8372 TREE_VALUE (TREE_OPERAND (arg1
, 1)), 0))
8374 tree tanfn
= mathfn_built_in (type
, BUILT_IN_TAN
);
8376 if (tanfn
!= NULL_TREE
)
8378 tree tmp
= TREE_OPERAND (arg0
, 1);
8379 tmp
= build_function_call_expr (tanfn
, tmp
);
8380 return fold_build2 (RDIV_EXPR
, type
,
8381 build_real (type
, dconst1
), tmp
);
8385 /* Optimize pow(x,c)/x as pow(x,c-1). */
8386 if (fcode0
== BUILT_IN_POW
8387 || fcode0
== BUILT_IN_POWF
8388 || fcode0
== BUILT_IN_POWL
)
8390 tree arg00
= TREE_VALUE (TREE_OPERAND (arg0
, 1));
8391 tree arg01
= TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0
, 1)));
8392 if (TREE_CODE (arg01
) == REAL_CST
8393 && ! TREE_CONSTANT_OVERFLOW (arg01
)
8394 && operand_equal_p (arg1
, arg00
, 0))
8396 tree powfn
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
8400 c
= TREE_REAL_CST (arg01
);
8401 real_arithmetic (&c
, MINUS_EXPR
, &c
, &dconst1
);
8402 arg
= build_real (type
, c
);
8403 arglist
= build_tree_list (NULL_TREE
, arg
);
8404 arglist
= tree_cons (NULL_TREE
, arg1
, arglist
);
8405 return build_function_call_expr (powfn
, arglist
);
8411 case TRUNC_DIV_EXPR
:
8412 case ROUND_DIV_EXPR
:
8413 case FLOOR_DIV_EXPR
:
8415 case EXACT_DIV_EXPR
:
8416 if (integer_onep (arg1
))
8417 return non_lvalue (fold_convert (type
, arg0
));
8418 if (integer_zerop (arg1
))
8421 if (!TYPE_UNSIGNED (type
)
8422 && TREE_CODE (arg1
) == INTEGER_CST
8423 && TREE_INT_CST_LOW (arg1
) == (unsigned HOST_WIDE_INT
) -1
8424 && TREE_INT_CST_HIGH (arg1
) == -1)
8425 return fold_convert (type
, negate_expr (arg0
));
8427 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
8428 operation, EXACT_DIV_EXPR.
8430 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
8431 At one time others generated faster code, it's not clear if they do
8432 after the last round to changes to the DIV code in expmed.c. */
8433 if ((code
== CEIL_DIV_EXPR
|| code
== FLOOR_DIV_EXPR
)
8434 && multiple_of_p (type
, arg0
, arg1
))
8435 return fold_build2 (EXACT_DIV_EXPR
, type
, arg0
, arg1
);
8437 if (TREE_CODE (arg1
) == INTEGER_CST
8438 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
)))
8439 return fold_convert (type
, tem
);
8444 case FLOOR_MOD_EXPR
:
8445 case ROUND_MOD_EXPR
:
8446 case TRUNC_MOD_EXPR
:
8447 /* X % 1 is always zero, but be sure to preserve any side
8449 if (integer_onep (arg1
))
8450 return omit_one_operand (type
, integer_zero_node
, arg0
);
8452 /* X % 0, return X % 0 unchanged so that we can get the
8453 proper warnings and errors. */
8454 if (integer_zerop (arg1
))
8457 /* 0 % X is always zero, but be sure to preserve any side
8458 effects in X. Place this after checking for X == 0. */
8459 if (integer_zerop (arg0
))
8460 return omit_one_operand (type
, integer_zero_node
, arg1
);
8462 /* X % -1 is zero. */
8463 if (!TYPE_UNSIGNED (type
)
8464 && TREE_CODE (arg1
) == INTEGER_CST
8465 && TREE_INT_CST_LOW (arg1
) == (unsigned HOST_WIDE_INT
) -1
8466 && TREE_INT_CST_HIGH (arg1
) == -1)
8467 return omit_one_operand (type
, integer_zero_node
, arg0
);
8469 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
8470 i.e. "X % C" into "X & C2", if X and C are positive. */
8471 if ((code
== TRUNC_MOD_EXPR
|| code
== FLOOR_MOD_EXPR
)
8472 && (TYPE_UNSIGNED (type
) || tree_expr_nonnegative_p (arg0
))
8473 && integer_pow2p (arg1
) && tree_int_cst_sgn (arg1
) >= 0)
8475 unsigned HOST_WIDE_INT high
, low
;
8479 l
= tree_log2 (arg1
);
8480 if (l
>= HOST_BITS_PER_WIDE_INT
)
8482 high
= ((unsigned HOST_WIDE_INT
) 1
8483 << (l
- HOST_BITS_PER_WIDE_INT
)) - 1;
8489 low
= ((unsigned HOST_WIDE_INT
) 1 << l
) - 1;
8492 mask
= build_int_cst_wide (type
, low
, high
);
8493 return fold_build2 (BIT_AND_EXPR
, type
,
8494 fold_convert (type
, arg0
), mask
);
8497 /* X % -C is the same as X % C. */
8498 if (code
== TRUNC_MOD_EXPR
8499 && !TYPE_UNSIGNED (type
)
8500 && TREE_CODE (arg1
) == INTEGER_CST
8501 && !TREE_CONSTANT_OVERFLOW (arg1
)
8502 && TREE_INT_CST_HIGH (arg1
) < 0
8504 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
8505 && !sign_bit_p (arg1
, arg1
))
8506 return fold_build2 (code
, type
, fold_convert (type
, arg0
),
8507 fold_convert (type
, negate_expr (arg1
)));
8509 /* X % -Y is the same as X % Y. */
8510 if (code
== TRUNC_MOD_EXPR
8511 && !TYPE_UNSIGNED (type
)
8512 && TREE_CODE (arg1
) == NEGATE_EXPR
8514 return fold_build2 (code
, type
, fold_convert (type
, arg0
),
8515 fold_convert (type
, TREE_OPERAND (arg1
, 0)));
8517 if (TREE_CODE (arg1
) == INTEGER_CST
8518 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
)))
8519 return fold_convert (type
, tem
);
8525 if (integer_all_onesp (arg0
))
8526 return omit_one_operand (type
, arg0
, arg1
);
8530 /* Optimize -1 >> x for arithmetic right shifts. */
8531 if (integer_all_onesp (arg0
) && !TYPE_UNSIGNED (type
))
8532 return omit_one_operand (type
, arg0
, arg1
);
8533 /* ... fall through ... */
8537 if (integer_zerop (arg1
))
8538 return non_lvalue (fold_convert (type
, arg0
));
8539 if (integer_zerop (arg0
))
8540 return omit_one_operand (type
, arg0
, arg1
);
8542 /* Since negative shift count is not well-defined,
8543 don't try to compute it in the compiler. */
8544 if (TREE_CODE (arg1
) == INTEGER_CST
&& tree_int_cst_sgn (arg1
) < 0)
8547 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
8548 if (TREE_CODE (arg0
) == code
&& host_integerp (arg1
, false)
8549 && TREE_INT_CST_LOW (arg1
) < TYPE_PRECISION (type
)
8550 && host_integerp (TREE_OPERAND (arg0
, 1), false)
8551 && TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1)) < TYPE_PRECISION (type
))
8553 HOST_WIDE_INT low
= (TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1))
8554 + TREE_INT_CST_LOW (arg1
));
8556 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
8557 being well defined. */
8558 if (low
>= TYPE_PRECISION (type
))
8560 if (code
== LROTATE_EXPR
|| code
== RROTATE_EXPR
)
8561 low
= low
% TYPE_PRECISION (type
);
8562 else if (TYPE_UNSIGNED (type
) || code
== LSHIFT_EXPR
)
8563 return build_int_cst (type
, 0);
8565 low
= TYPE_PRECISION (type
) - 1;
8568 return fold_build2 (code
, type
, TREE_OPERAND (arg0
, 0),
8569 build_int_cst (type
, low
));
8572 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
8573 into x & ((unsigned)-1 >> c) for unsigned types. */
8574 if (((code
== LSHIFT_EXPR
&& TREE_CODE (arg0
) == RSHIFT_EXPR
)
8575 || (TYPE_UNSIGNED (type
)
8576 && code
== RSHIFT_EXPR
&& TREE_CODE (arg0
) == LSHIFT_EXPR
))
8577 && host_integerp (arg1
, false)
8578 && TREE_INT_CST_LOW (arg1
) < TYPE_PRECISION (type
)
8579 && host_integerp (TREE_OPERAND (arg0
, 1), false)
8580 && TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1)) < TYPE_PRECISION (type
))
8582 HOST_WIDE_INT low0
= TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1));
8583 HOST_WIDE_INT low1
= TREE_INT_CST_LOW (arg1
);
8589 arg00
= fold_convert (type
, TREE_OPERAND (arg0
, 0));
8591 lshift
= build_int_cst (type
, -1);
8592 lshift
= int_const_binop (code
, lshift
, arg1
, 0);
8594 return fold_build2 (BIT_AND_EXPR
, type
, arg00
, lshift
);
8598 /* Rewrite an LROTATE_EXPR by a constant into an
8599 RROTATE_EXPR by a new constant. */
8600 if (code
== LROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
)
8602 tree tem
= build_int_cst (NULL_TREE
,
8603 GET_MODE_BITSIZE (TYPE_MODE (type
)));
8604 tem
= fold_convert (TREE_TYPE (arg1
), tem
);
8605 tem
= const_binop (MINUS_EXPR
, tem
, arg1
, 0);
8606 return fold_build2 (RROTATE_EXPR
, type
, arg0
, tem
);
8609 /* If we have a rotate of a bit operation with the rotate count and
8610 the second operand of the bit operation both constant,
8611 permute the two operations. */
8612 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
8613 && (TREE_CODE (arg0
) == BIT_AND_EXPR
8614 || TREE_CODE (arg0
) == BIT_IOR_EXPR
8615 || TREE_CODE (arg0
) == BIT_XOR_EXPR
)
8616 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
8617 return fold_build2 (TREE_CODE (arg0
), type
,
8618 fold_build2 (code
, type
,
8619 TREE_OPERAND (arg0
, 0), arg1
),
8620 fold_build2 (code
, type
,
8621 TREE_OPERAND (arg0
, 1), arg1
));
8623 /* Two consecutive rotates adding up to the width of the mode can
8625 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
8626 && TREE_CODE (arg0
) == RROTATE_EXPR
8627 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8628 && TREE_INT_CST_HIGH (arg1
) == 0
8629 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0
, 1)) == 0
8630 && ((TREE_INT_CST_LOW (arg1
)
8631 + TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1)))
8632 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type
))))
8633 return TREE_OPERAND (arg0
, 0);
8638 if (operand_equal_p (arg0
, arg1
, 0))
8639 return omit_one_operand (type
, arg0
, arg1
);
8640 if (INTEGRAL_TYPE_P (type
)
8641 && operand_equal_p (arg1
, TYPE_MIN_VALUE (type
), OEP_ONLY_CONST
))
8642 return omit_one_operand (type
, arg1
, arg0
);
8646 if (operand_equal_p (arg0
, arg1
, 0))
8647 return omit_one_operand (type
, arg0
, arg1
);
8648 if (INTEGRAL_TYPE_P (type
)
8649 && TYPE_MAX_VALUE (type
)
8650 && operand_equal_p (arg1
, TYPE_MAX_VALUE (type
), OEP_ONLY_CONST
))
8651 return omit_one_operand (type
, arg1
, arg0
);
8654 case TRUTH_ANDIF_EXPR
:
8655 /* Note that the operands of this must be ints
8656 and their values must be 0 or 1.
8657 ("true" is a fixed value perhaps depending on the language.) */
8658 /* If first arg is constant zero, return it. */
8659 if (integer_zerop (arg0
))
8660 return fold_convert (type
, arg0
);
8661 case TRUTH_AND_EXPR
:
8662 /* If either arg is constant true, drop it. */
8663 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
8664 return non_lvalue (fold_convert (type
, arg1
));
8665 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
)
8666 /* Preserve sequence points. */
8667 && (code
!= TRUTH_ANDIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
8668 return non_lvalue (fold_convert (type
, arg0
));
8669 /* If second arg is constant zero, result is zero, but first arg
8670 must be evaluated. */
8671 if (integer_zerop (arg1
))
8672 return omit_one_operand (type
, arg1
, arg0
);
8673 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
8674 case will be handled here. */
8675 if (integer_zerop (arg0
))
8676 return omit_one_operand (type
, arg0
, arg1
);
8678 /* !X && X is always false. */
8679 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
8680 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
8681 return omit_one_operand (type
, integer_zero_node
, arg1
);
8682 /* X && !X is always false. */
8683 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
8684 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
8685 return omit_one_operand (type
, integer_zero_node
, arg0
);
8687 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
8688 means A >= Y && A != MAX, but in this case we know that
8691 if (!TREE_SIDE_EFFECTS (arg0
)
8692 && !TREE_SIDE_EFFECTS (arg1
))
8694 tem
= fold_to_nonsharp_ineq_using_bound (arg0
, arg1
);
8695 if (tem
&& !operand_equal_p (tem
, arg0
, 0))
8696 return fold_build2 (code
, type
, tem
, arg1
);
8698 tem
= fold_to_nonsharp_ineq_using_bound (arg1
, arg0
);
8699 if (tem
&& !operand_equal_p (tem
, arg1
, 0))
8700 return fold_build2 (code
, type
, arg0
, tem
);
8704 /* We only do these simplifications if we are optimizing. */
8708 /* Check for things like (A || B) && (A || C). We can convert this
8709 to A || (B && C). Note that either operator can be any of the four
8710 truth and/or operations and the transformation will still be
8711 valid. Also note that we only care about order for the
8712 ANDIF and ORIF operators. If B contains side effects, this
8713 might change the truth-value of A. */
8714 if (TREE_CODE (arg0
) == TREE_CODE (arg1
)
8715 && (TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
8716 || TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
8717 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
8718 || TREE_CODE (arg0
) == TRUTH_OR_EXPR
)
8719 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0
, 1)))
8721 tree a00
= TREE_OPERAND (arg0
, 0);
8722 tree a01
= TREE_OPERAND (arg0
, 1);
8723 tree a10
= TREE_OPERAND (arg1
, 0);
8724 tree a11
= TREE_OPERAND (arg1
, 1);
8725 int commutative
= ((TREE_CODE (arg0
) == TRUTH_OR_EXPR
8726 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
)
8727 && (code
== TRUTH_AND_EXPR
8728 || code
== TRUTH_OR_EXPR
));
8730 if (operand_equal_p (a00
, a10
, 0))
8731 return fold_build2 (TREE_CODE (arg0
), type
, a00
,
8732 fold_build2 (code
, type
, a01
, a11
));
8733 else if (commutative
&& operand_equal_p (a00
, a11
, 0))
8734 return fold_build2 (TREE_CODE (arg0
), type
, a00
,
8735 fold_build2 (code
, type
, a01
, a10
));
8736 else if (commutative
&& operand_equal_p (a01
, a10
, 0))
8737 return fold_build2 (TREE_CODE (arg0
), type
, a01
,
8738 fold_build2 (code
, type
, a00
, a11
));
8740 /* This case if tricky because we must either have commutative
8741 operators or else A10 must not have side-effects. */
8743 else if ((commutative
|| ! TREE_SIDE_EFFECTS (a10
))
8744 && operand_equal_p (a01
, a11
, 0))
8745 return fold_build2 (TREE_CODE (arg0
), type
,
8746 fold_build2 (code
, type
, a00
, a10
),
8750 /* See if we can build a range comparison. */
8751 if (0 != (tem
= fold_range_test (code
, type
, op0
, op1
)))
8754 /* Check for the possibility of merging component references. If our
8755 lhs is another similar operation, try to merge its rhs with our
8756 rhs. Then try to merge our lhs and rhs. */
8757 if (TREE_CODE (arg0
) == code
8758 && 0 != (tem
= fold_truthop (code
, type
,
8759 TREE_OPERAND (arg0
, 1), arg1
)))
8760 return fold_build2 (code
, type
, TREE_OPERAND (arg0
, 0), tem
);
8762 if ((tem
= fold_truthop (code
, type
, arg0
, arg1
)) != 0)
8767 case TRUTH_ORIF_EXPR
:
8768 /* Note that the operands of this must be ints
8769 and their values must be 0 or true.
8770 ("true" is a fixed value perhaps depending on the language.) */
8771 /* If first arg is constant true, return it. */
8772 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
8773 return fold_convert (type
, arg0
);
8775 /* If either arg is constant zero, drop it. */
8776 if (TREE_CODE (arg0
) == INTEGER_CST
&& integer_zerop (arg0
))
8777 return non_lvalue (fold_convert (type
, arg1
));
8778 if (TREE_CODE (arg1
) == INTEGER_CST
&& integer_zerop (arg1
)
8779 /* Preserve sequence points. */
8780 && (code
!= TRUTH_ORIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
8781 return non_lvalue (fold_convert (type
, arg0
));
8782 /* If second arg is constant true, result is true, but we must
8783 evaluate first arg. */
8784 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
))
8785 return omit_one_operand (type
, arg1
, arg0
);
8786 /* Likewise for first arg, but note this only occurs here for
8788 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
8789 return omit_one_operand (type
, arg0
, arg1
);
8791 /* !X || X is always true. */
8792 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
8793 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
8794 return omit_one_operand (type
, integer_one_node
, arg1
);
8795 /* X || !X is always true. */
8796 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
8797 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
8798 return omit_one_operand (type
, integer_one_node
, arg0
);
8802 case TRUTH_XOR_EXPR
:
8803 /* If the second arg is constant zero, drop it. */
8804 if (integer_zerop (arg1
))
8805 return non_lvalue (fold_convert (type
, arg0
));
8806 /* If the second arg is constant true, this is a logical inversion. */
8807 if (integer_onep (arg1
))
8809 /* Only call invert_truthvalue if operand is a truth value. */
8810 if (TREE_CODE (TREE_TYPE (arg0
)) != BOOLEAN_TYPE
)
8811 tem
= fold_build1 (TRUTH_NOT_EXPR
, TREE_TYPE (arg0
), arg0
);
8813 tem
= invert_truthvalue (arg0
);
8814 return non_lvalue (fold_convert (type
, tem
));
8816 /* Identical arguments cancel to zero. */
8817 if (operand_equal_p (arg0
, arg1
, 0))
8818 return omit_one_operand (type
, integer_zero_node
, arg0
);
8820 /* !X ^ X is always true. */
8821 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
8822 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
8823 return omit_one_operand (type
, integer_one_node
, arg1
);
8825 /* X ^ !X is always true. */
8826 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
8827 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
8828 return omit_one_operand (type
, integer_one_node
, arg0
);
8838 /* If one arg is a real or integer constant, put it last. */
8839 if (tree_swap_operands_p (arg0
, arg1
, true))
8840 return fold_build2 (swap_tree_comparison (code
), type
, op1
, op0
);
8842 /* bool_var != 0 becomes bool_var. */
8843 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_zerop (arg1
)
8845 return non_lvalue (fold_convert (type
, arg0
));
8847 /* bool_var == 1 becomes bool_var. */
8848 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_onep (arg1
)
8850 return non_lvalue (fold_convert (type
, arg0
));
8852 /* If this is an equality comparison of the address of a non-weak
8853 object against zero, then we know the result. */
8854 if ((code
== EQ_EXPR
|| code
== NE_EXPR
)
8855 && TREE_CODE (arg0
) == ADDR_EXPR
8856 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0
, 0))
8857 && ! DECL_WEAK (TREE_OPERAND (arg0
, 0))
8858 && integer_zerop (arg1
))
8859 return constant_boolean_node (code
!= EQ_EXPR
, type
);
8861 /* If this is an equality comparison of the address of two non-weak,
8862 unaliased symbols neither of which are extern (since we do not
8863 have access to attributes for externs), then we know the result. */
8864 if ((code
== EQ_EXPR
|| code
== NE_EXPR
)
8865 && TREE_CODE (arg0
) == ADDR_EXPR
8866 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0
, 0))
8867 && ! DECL_WEAK (TREE_OPERAND (arg0
, 0))
8868 && ! lookup_attribute ("alias",
8869 DECL_ATTRIBUTES (TREE_OPERAND (arg0
, 0)))
8870 && ! DECL_EXTERNAL (TREE_OPERAND (arg0
, 0))
8871 && TREE_CODE (arg1
) == ADDR_EXPR
8872 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1
, 0))
8873 && ! DECL_WEAK (TREE_OPERAND (arg1
, 0))
8874 && ! lookup_attribute ("alias",
8875 DECL_ATTRIBUTES (TREE_OPERAND (arg1
, 0)))
8876 && ! DECL_EXTERNAL (TREE_OPERAND (arg1
, 0)))
8878 /* We know that we're looking at the address of two
8879 non-weak, unaliased, static _DECL nodes.
8881 It is both wasteful and incorrect to call operand_equal_p
8882 to compare the two ADDR_EXPR nodes. It is wasteful in that
8883 all we need to do is test pointer equality for the arguments
8884 to the two ADDR_EXPR nodes. It is incorrect to use
8885 operand_equal_p as that function is NOT equivalent to a
8886 C equality test. It can in fact return false for two
8887 objects which would test as equal using the C equality
8889 bool equal
= TREE_OPERAND (arg0
, 0) == TREE_OPERAND (arg1
, 0);
8890 return constant_boolean_node (equal
8891 ? code
== EQ_EXPR
: code
!= EQ_EXPR
,
8895 /* If this is a comparison of two exprs that look like an
8896 ARRAY_REF of the same object, then we can fold this to a
8897 comparison of the two offsets. */
8898 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
8900 tree base0
, offset0
, base1
, offset1
;
8902 if (extract_array_ref (arg0
, &base0
, &offset0
)
8903 && extract_array_ref (arg1
, &base1
, &offset1
)
8904 && operand_equal_p (base0
, base1
, 0))
8906 /* Handle no offsets on both sides specially. */
8907 if (offset0
== NULL_TREE
8908 && offset1
== NULL_TREE
)
8909 return fold_build2 (code
, type
, integer_zero_node
,
8912 if (!offset0
|| !offset1
8913 || TREE_TYPE (offset0
) == TREE_TYPE (offset1
))
8915 if (offset0
== NULL_TREE
)
8916 offset0
= build_int_cst (TREE_TYPE (offset1
), 0);
8917 if (offset1
== NULL_TREE
)
8918 offset1
= build_int_cst (TREE_TYPE (offset0
), 0);
8919 return fold_build2 (code
, type
, offset0
, offset1
);
8924 /* Transform comparisons of the form X +- C CMP X. */
8925 if ((code
!= EQ_EXPR
&& code
!= NE_EXPR
)
8926 && (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8927 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
8928 && ((TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
8929 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
))))
8930 || (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8931 && !TYPE_UNSIGNED (TREE_TYPE (arg1
))
8932 && !(flag_wrapv
|| flag_trapv
))))
8934 tree arg01
= TREE_OPERAND (arg0
, 1);
8935 enum tree_code code0
= TREE_CODE (arg0
);
8938 if (TREE_CODE (arg01
) == REAL_CST
)
8939 is_positive
= REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01
)) ? -1 : 1;
8941 is_positive
= tree_int_cst_sgn (arg01
);
8943 /* (X - c) > X becomes false. */
8945 && ((code0
== MINUS_EXPR
&& is_positive
>= 0)
8946 || (code0
== PLUS_EXPR
&& is_positive
<= 0)))
8947 return constant_boolean_node (0, type
);
8949 /* Likewise (X + c) < X becomes false. */
8951 && ((code0
== PLUS_EXPR
&& is_positive
>= 0)
8952 || (code0
== MINUS_EXPR
&& is_positive
<= 0)))
8953 return constant_boolean_node (0, type
);
8955 /* Convert (X - c) <= X to true. */
8956 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
)))
8958 && ((code0
== MINUS_EXPR
&& is_positive
>= 0)
8959 || (code0
== PLUS_EXPR
&& is_positive
<= 0)))
8960 return constant_boolean_node (1, type
);
8962 /* Convert (X + c) >= X to true. */
8963 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
)))
8965 && ((code0
== PLUS_EXPR
&& is_positive
>= 0)
8966 || (code0
== MINUS_EXPR
&& is_positive
<= 0)))
8967 return constant_boolean_node (1, type
);
8969 if (TREE_CODE (arg01
) == INTEGER_CST
)
8971 /* Convert X + c > X and X - c < X to true for integers. */
8973 && ((code0
== PLUS_EXPR
&& is_positive
> 0)
8974 || (code0
== MINUS_EXPR
&& is_positive
< 0)))
8975 return constant_boolean_node (1, type
);
8978 && ((code0
== MINUS_EXPR
&& is_positive
> 0)
8979 || (code0
== PLUS_EXPR
&& is_positive
< 0)))
8980 return constant_boolean_node (1, type
);
8982 /* Convert X + c <= X and X - c >= X to false for integers. */
8984 && ((code0
== PLUS_EXPR
&& is_positive
> 0)
8985 || (code0
== MINUS_EXPR
&& is_positive
< 0)))
8986 return constant_boolean_node (0, type
);
8989 && ((code0
== MINUS_EXPR
&& is_positive
> 0)
8990 || (code0
== PLUS_EXPR
&& is_positive
< 0)))
8991 return constant_boolean_node (0, type
);
8995 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8996 if ((TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8997 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8998 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1))
8999 && !TYPE_UNSIGNED (TREE_TYPE (arg1
))
9000 && !(flag_wrapv
|| flag_trapv
))
9001 && (TREE_CODE (arg1
) == INTEGER_CST
9002 && !TREE_OVERFLOW (arg1
)))
9004 tree const1
= TREE_OPERAND (arg0
, 1);
9006 tree variable
= TREE_OPERAND (arg0
, 0);
9009 lhs_add
= TREE_CODE (arg0
) != PLUS_EXPR
;
9011 lhs
= fold_build2 (lhs_add
? PLUS_EXPR
: MINUS_EXPR
,
9012 TREE_TYPE (arg1
), const2
, const1
);
9013 if (TREE_CODE (lhs
) == TREE_CODE (arg1
)
9014 && (TREE_CODE (lhs
) != INTEGER_CST
9015 || !TREE_OVERFLOW (lhs
)))
9016 return fold_build2 (code
, type
, variable
, lhs
);
9019 if (FLOAT_TYPE_P (TREE_TYPE (arg0
)))
9021 tree targ0
= strip_float_extensions (arg0
);
9022 tree targ1
= strip_float_extensions (arg1
);
9023 tree newtype
= TREE_TYPE (targ0
);
9025 if (TYPE_PRECISION (TREE_TYPE (targ1
)) > TYPE_PRECISION (newtype
))
9026 newtype
= TREE_TYPE (targ1
);
9028 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9029 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (TREE_TYPE (arg0
)))
9030 return fold_build2 (code
, type
, fold_convert (newtype
, targ0
),
9031 fold_convert (newtype
, targ1
));
9033 /* (-a) CMP (-b) -> b CMP a */
9034 if (TREE_CODE (arg0
) == NEGATE_EXPR
9035 && TREE_CODE (arg1
) == NEGATE_EXPR
)
9036 return fold_build2 (code
, type
, TREE_OPERAND (arg1
, 0),
9037 TREE_OPERAND (arg0
, 0));
9039 if (TREE_CODE (arg1
) == REAL_CST
)
9041 REAL_VALUE_TYPE cst
;
9042 cst
= TREE_REAL_CST (arg1
);
9044 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9045 if (TREE_CODE (arg0
) == NEGATE_EXPR
)
9047 fold_build2 (swap_tree_comparison (code
), type
,
9048 TREE_OPERAND (arg0
, 0),
9049 build_real (TREE_TYPE (arg1
),
9050 REAL_VALUE_NEGATE (cst
)));
9052 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9053 /* a CMP (-0) -> a CMP 0 */
9054 if (REAL_VALUE_MINUS_ZERO (cst
))
9055 return fold_build2 (code
, type
, arg0
,
9056 build_real (TREE_TYPE (arg1
), dconst0
));
9058 /* x != NaN is always true, other ops are always false. */
9059 if (REAL_VALUE_ISNAN (cst
)
9060 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1
))))
9062 tem
= (code
== NE_EXPR
) ? integer_one_node
: integer_zero_node
;
9063 return omit_one_operand (type
, tem
, arg0
);
9066 /* Fold comparisons against infinity. */
9067 if (REAL_VALUE_ISINF (cst
))
9069 tem
= fold_inf_compare (code
, type
, arg0
, arg1
);
9070 if (tem
!= NULL_TREE
)
9075 /* If this is a comparison of a real constant with a PLUS_EXPR
9076 or a MINUS_EXPR of a real constant, we can convert it into a
9077 comparison with a revised real constant as long as no overflow
9078 occurs when unsafe_math_optimizations are enabled. */
9079 if (flag_unsafe_math_optimizations
9080 && TREE_CODE (arg1
) == REAL_CST
9081 && (TREE_CODE (arg0
) == PLUS_EXPR
9082 || TREE_CODE (arg0
) == MINUS_EXPR
)
9083 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
9084 && 0 != (tem
= const_binop (TREE_CODE (arg0
) == PLUS_EXPR
9085 ? MINUS_EXPR
: PLUS_EXPR
,
9086 arg1
, TREE_OPERAND (arg0
, 1), 0))
9087 && ! TREE_CONSTANT_OVERFLOW (tem
))
9088 return fold_build2 (code
, type
, TREE_OPERAND (arg0
, 0), tem
);
9090 /* Likewise, we can simplify a comparison of a real constant with
9091 a MINUS_EXPR whose first operand is also a real constant, i.e.
9092 (c1 - x) < c2 becomes x > c1-c2. */
9093 if (flag_unsafe_math_optimizations
9094 && TREE_CODE (arg1
) == REAL_CST
9095 && TREE_CODE (arg0
) == MINUS_EXPR
9096 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == REAL_CST
9097 && 0 != (tem
= const_binop (MINUS_EXPR
, TREE_OPERAND (arg0
, 0),
9099 && ! TREE_CONSTANT_OVERFLOW (tem
))
9100 return fold_build2 (swap_tree_comparison (code
), type
,
9101 TREE_OPERAND (arg0
, 1), tem
);
9103 /* Fold comparisons against built-in math functions. */
9104 if (TREE_CODE (arg1
) == REAL_CST
9105 && flag_unsafe_math_optimizations
9106 && ! flag_errno_math
)
9108 enum built_in_function fcode
= builtin_mathfn_code (arg0
);
9110 if (fcode
!= END_BUILTINS
)
9112 tem
= fold_mathfn_compare (fcode
, code
, type
, arg0
, arg1
);
9113 if (tem
!= NULL_TREE
)
9119 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
9120 if (TREE_CONSTANT (arg1
)
9121 && (TREE_CODE (arg0
) == POSTINCREMENT_EXPR
9122 || TREE_CODE (arg0
) == POSTDECREMENT_EXPR
)
9123 /* This optimization is invalid for ordered comparisons
9124 if CONST+INCR overflows or if foo+incr might overflow.
9125 This optimization is invalid for floating point due to rounding.
9126 For pointer types we assume overflow doesn't happen. */
9127 && (POINTER_TYPE_P (TREE_TYPE (arg0
))
9128 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
9129 && (code
== EQ_EXPR
|| code
== NE_EXPR
))))
9131 tree varop
, newconst
;
9133 if (TREE_CODE (arg0
) == POSTINCREMENT_EXPR
)
9135 newconst
= fold_build2 (PLUS_EXPR
, TREE_TYPE (arg0
),
9136 arg1
, TREE_OPERAND (arg0
, 1));
9137 varop
= build2 (PREINCREMENT_EXPR
, TREE_TYPE (arg0
),
9138 TREE_OPERAND (arg0
, 0),
9139 TREE_OPERAND (arg0
, 1));
9143 newconst
= fold_build2 (MINUS_EXPR
, TREE_TYPE (arg0
),
9144 arg1
, TREE_OPERAND (arg0
, 1));
9145 varop
= build2 (PREDECREMENT_EXPR
, TREE_TYPE (arg0
),
9146 TREE_OPERAND (arg0
, 0),
9147 TREE_OPERAND (arg0
, 1));
9151 /* If VAROP is a reference to a bitfield, we must mask
9152 the constant by the width of the field. */
9153 if (TREE_CODE (TREE_OPERAND (varop
, 0)) == COMPONENT_REF
9154 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop
, 0), 1))
9155 && host_integerp (DECL_SIZE (TREE_OPERAND
9156 (TREE_OPERAND (varop
, 0), 1)), 1))
9158 tree fielddecl
= TREE_OPERAND (TREE_OPERAND (varop
, 0), 1);
9159 HOST_WIDE_INT size
= tree_low_cst (DECL_SIZE (fielddecl
), 1);
9160 tree folded_compare
, shift
;
9162 /* First check whether the comparison would come out
9163 always the same. If we don't do that we would
9164 change the meaning with the masking. */
9165 folded_compare
= fold_build2 (code
, type
,
9166 TREE_OPERAND (varop
, 0), arg1
);
9167 if (integer_zerop (folded_compare
)
9168 || integer_onep (folded_compare
))
9169 return omit_one_operand (type
, folded_compare
, varop
);
9171 shift
= build_int_cst (NULL_TREE
,
9172 TYPE_PRECISION (TREE_TYPE (varop
)) - size
);
9173 shift
= fold_convert (TREE_TYPE (varop
), shift
);
9174 newconst
= fold_build2 (LSHIFT_EXPR
, TREE_TYPE (varop
),
9176 newconst
= fold_build2 (RSHIFT_EXPR
, TREE_TYPE (varop
),
9180 return fold_build2 (code
, type
, varop
, newconst
);
9183 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
9184 This transformation affects the cases which are handled in later
9185 optimizations involving comparisons with non-negative constants. */
9186 if (TREE_CODE (arg1
) == INTEGER_CST
9187 && TREE_CODE (arg0
) != INTEGER_CST
9188 && tree_int_cst_sgn (arg1
) > 0)
9193 arg1
= const_binop (MINUS_EXPR
, arg1
,
9194 build_int_cst (TREE_TYPE (arg1
), 1), 0);
9195 return fold_build2 (GT_EXPR
, type
, arg0
,
9196 fold_convert (TREE_TYPE (arg0
), arg1
));
9199 arg1
= const_binop (MINUS_EXPR
, arg1
,
9200 build_int_cst (TREE_TYPE (arg1
), 1), 0);
9201 return fold_build2 (LE_EXPR
, type
, arg0
,
9202 fold_convert (TREE_TYPE (arg0
), arg1
));
9209 /* Comparisons with the highest or lowest possible integer of
9210 the specified size will have known values. */
9212 int width
= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1
)));
9214 if (TREE_CODE (arg1
) == INTEGER_CST
9215 && ! TREE_CONSTANT_OVERFLOW (arg1
)
9216 && width
<= 2 * HOST_BITS_PER_WIDE_INT
9217 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
9218 || POINTER_TYPE_P (TREE_TYPE (arg1
))))
9220 HOST_WIDE_INT signed_max_hi
;
9221 unsigned HOST_WIDE_INT signed_max_lo
;
9222 unsigned HOST_WIDE_INT max_hi
, max_lo
, min_hi
, min_lo
;
9224 if (width
<= HOST_BITS_PER_WIDE_INT
)
9226 signed_max_lo
= ((unsigned HOST_WIDE_INT
) 1 << (width
- 1))
9231 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
9233 max_lo
= ((unsigned HOST_WIDE_INT
) 2 << (width
- 1)) - 1;
9239 max_lo
= signed_max_lo
;
9240 min_lo
= ((unsigned HOST_WIDE_INT
) -1 << (width
- 1));
9246 width
-= HOST_BITS_PER_WIDE_INT
;
9248 signed_max_hi
= ((unsigned HOST_WIDE_INT
) 1 << (width
- 1))
9253 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
9255 max_hi
= ((unsigned HOST_WIDE_INT
) 2 << (width
- 1)) - 1;
9260 max_hi
= signed_max_hi
;
9261 min_hi
= ((unsigned HOST_WIDE_INT
) -1 << (width
- 1));
9265 if ((unsigned HOST_WIDE_INT
) TREE_INT_CST_HIGH (arg1
) == max_hi
9266 && TREE_INT_CST_LOW (arg1
) == max_lo
)
9270 return omit_one_operand (type
, integer_zero_node
, arg0
);
9273 return fold_build2 (EQ_EXPR
, type
, arg0
, arg1
);
9276 return omit_one_operand (type
, integer_one_node
, arg0
);
9279 return fold_build2 (NE_EXPR
, type
, arg0
, arg1
);
9281 /* The GE_EXPR and LT_EXPR cases above are not normally
9282 reached because of previous transformations. */
9287 else if ((unsigned HOST_WIDE_INT
) TREE_INT_CST_HIGH (arg1
)
9289 && TREE_INT_CST_LOW (arg1
) == max_lo
- 1)
9293 arg1
= const_binop (PLUS_EXPR
, arg1
, integer_one_node
, 0);
9294 return fold_build2 (EQ_EXPR
, type
, arg0
, arg1
);
9296 arg1
= const_binop (PLUS_EXPR
, arg1
, integer_one_node
, 0);
9297 return fold_build2 (NE_EXPR
, type
, arg0
, arg1
);
9301 else if ((unsigned HOST_WIDE_INT
) TREE_INT_CST_HIGH (arg1
)
9303 && TREE_INT_CST_LOW (arg1
) == min_lo
)
9307 return omit_one_operand (type
, integer_zero_node
, arg0
);
9310 return fold_build2 (EQ_EXPR
, type
, arg0
, arg1
);
9313 return omit_one_operand (type
, integer_one_node
, arg0
);
9316 return fold_build2 (NE_EXPR
, type
, op0
, op1
);
9321 else if ((unsigned HOST_WIDE_INT
) TREE_INT_CST_HIGH (arg1
)
9323 && TREE_INT_CST_LOW (arg1
) == min_lo
+ 1)
9327 arg1
= const_binop (MINUS_EXPR
, arg1
, integer_one_node
, 0);
9328 return fold_build2 (NE_EXPR
, type
, arg0
, arg1
);
9330 arg1
= const_binop (MINUS_EXPR
, arg1
, integer_one_node
, 0);
9331 return fold_build2 (EQ_EXPR
, type
, arg0
, arg1
);
9336 else if (!in_gimple_form
9337 && TREE_INT_CST_HIGH (arg1
) == signed_max_hi
9338 && TREE_INT_CST_LOW (arg1
) == signed_max_lo
9339 && TYPE_UNSIGNED (TREE_TYPE (arg1
))
9340 /* signed_type does not work on pointer types. */
9341 && INTEGRAL_TYPE_P (TREE_TYPE (arg1
)))
9343 /* The following case also applies to X < signed_max+1
9344 and X >= signed_max+1 because previous transformations. */
9345 if (code
== LE_EXPR
|| code
== GT_EXPR
)
9348 st0
= lang_hooks
.types
.signed_type (TREE_TYPE (arg0
));
9349 st1
= lang_hooks
.types
.signed_type (TREE_TYPE (arg1
));
9350 return fold_build2 (code
== LE_EXPR
? GE_EXPR
: LT_EXPR
,
9351 type
, fold_convert (st0
, arg0
),
9352 build_int_cst (st1
, 0));
9358 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
9359 a MINUS_EXPR of a constant, we can convert it into a comparison with
9360 a revised constant as long as no overflow occurs. */
9361 if ((code
== EQ_EXPR
|| code
== NE_EXPR
)
9362 && TREE_CODE (arg1
) == INTEGER_CST
9363 && (TREE_CODE (arg0
) == PLUS_EXPR
9364 || TREE_CODE (arg0
) == MINUS_EXPR
)
9365 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
9366 && 0 != (tem
= const_binop (TREE_CODE (arg0
) == PLUS_EXPR
9367 ? MINUS_EXPR
: PLUS_EXPR
,
9368 arg1
, TREE_OPERAND (arg0
, 1), 0))
9369 && ! TREE_CONSTANT_OVERFLOW (tem
))
9370 return fold_build2 (code
, type
, TREE_OPERAND (arg0
, 0), tem
);
9372 /* Similarly for a NEGATE_EXPR. */
9373 else if ((code
== EQ_EXPR
|| code
== NE_EXPR
)
9374 && TREE_CODE (arg0
) == NEGATE_EXPR
9375 && TREE_CODE (arg1
) == INTEGER_CST
9376 && 0 != (tem
= negate_expr (arg1
))
9377 && TREE_CODE (tem
) == INTEGER_CST
9378 && ! TREE_CONSTANT_OVERFLOW (tem
))
9379 return fold_build2 (code
, type
, TREE_OPERAND (arg0
, 0), tem
);
9381 /* If we have X - Y == 0, we can convert that to X == Y and similarly
9382 for !=. Don't do this for ordered comparisons due to overflow. */
9383 else if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
9384 && integer_zerop (arg1
) && TREE_CODE (arg0
) == MINUS_EXPR
)
9385 return fold_build2 (code
, type
,
9386 TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg0
, 1));
9388 else if (TREE_CODE (TREE_TYPE (arg0
)) == INTEGER_TYPE
9389 && (TREE_CODE (arg0
) == NOP_EXPR
9390 || TREE_CODE (arg0
) == CONVERT_EXPR
))
9392 /* If we are widening one operand of an integer comparison,
9393 see if the other operand is similarly being widened. Perhaps we
9394 can do the comparison in the narrower type. */
9395 tem
= fold_widened_comparison (code
, type
, arg0
, arg1
);
9399 /* Or if we are changing signedness. */
9400 tem
= fold_sign_changed_comparison (code
, type
, arg0
, arg1
);
9405 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9406 constant, we can simplify it. */
9407 else if (TREE_CODE (arg1
) == INTEGER_CST
9408 && (TREE_CODE (arg0
) == MIN_EXPR
9409 || TREE_CODE (arg0
) == MAX_EXPR
)
9410 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
9412 tem
= optimize_minmax_comparison (code
, type
, op0
, op1
);
9419 /* If we are comparing an ABS_EXPR with a constant, we can
9420 convert all the cases into explicit comparisons, but they may
9421 well not be faster than doing the ABS and one comparison.
9422 But ABS (X) <= C is a range comparison, which becomes a subtraction
9423 and a comparison, and is probably faster. */
9424 else if (code
== LE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
9425 && TREE_CODE (arg0
) == ABS_EXPR
9426 && ! TREE_SIDE_EFFECTS (arg0
)
9427 && (0 != (tem
= negate_expr (arg1
)))
9428 && TREE_CODE (tem
) == INTEGER_CST
9429 && ! TREE_CONSTANT_OVERFLOW (tem
))
9430 return fold_build2 (TRUTH_ANDIF_EXPR
, type
,
9431 build2 (GE_EXPR
, type
,
9432 TREE_OPERAND (arg0
, 0), tem
),
9433 build2 (LE_EXPR
, type
,
9434 TREE_OPERAND (arg0
, 0), arg1
));
9436 /* Convert ABS_EXPR<x> >= 0 to true. */
9437 else if (code
== GE_EXPR
9438 && tree_expr_nonnegative_p (arg0
)
9439 && (integer_zerop (arg1
)
9440 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
)))
9441 && real_zerop (arg1
))))
9442 return omit_one_operand (type
, integer_one_node
, arg0
);
9444 /* Convert ABS_EXPR<x> < 0 to false. */
9445 else if (code
== LT_EXPR
9446 && tree_expr_nonnegative_p (arg0
)
9447 && (integer_zerop (arg1
) || real_zerop (arg1
)))
9448 return omit_one_operand (type
, integer_zero_node
, arg0
);
9450 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
9451 else if ((code
== EQ_EXPR
|| code
== NE_EXPR
)
9452 && TREE_CODE (arg0
) == ABS_EXPR
9453 && (integer_zerop (arg1
) || real_zerop (arg1
)))
9454 return fold_build2 (code
, type
, TREE_OPERAND (arg0
, 0), arg1
);
9456 /* If this is an EQ or NE comparison with zero and ARG0 is
9457 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
9458 two operations, but the latter can be done in one less insn
9459 on machines that have only two-operand insns or on which a
9460 constant cannot be the first operand. */
9461 if (integer_zerop (arg1
) && (code
== EQ_EXPR
|| code
== NE_EXPR
)
9462 && TREE_CODE (arg0
) == BIT_AND_EXPR
)
9464 tree arg00
= TREE_OPERAND (arg0
, 0);
9465 tree arg01
= TREE_OPERAND (arg0
, 1);
9466 if (TREE_CODE (arg00
) == LSHIFT_EXPR
9467 && integer_onep (TREE_OPERAND (arg00
, 0)))
9469 fold_build2 (code
, type
,
9470 build2 (BIT_AND_EXPR
, TREE_TYPE (arg0
),
9471 build2 (RSHIFT_EXPR
, TREE_TYPE (arg00
),
9472 arg01
, TREE_OPERAND (arg00
, 1)),
9473 fold_convert (TREE_TYPE (arg0
),
9476 else if (TREE_CODE (TREE_OPERAND (arg0
, 1)) == LSHIFT_EXPR
9477 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0
, 1), 0)))
9479 fold_build2 (code
, type
,
9480 build2 (BIT_AND_EXPR
, TREE_TYPE (arg0
),
9481 build2 (RSHIFT_EXPR
, TREE_TYPE (arg01
),
9482 arg00
, TREE_OPERAND (arg01
, 1)),
9483 fold_convert (TREE_TYPE (arg0
),
9488 /* If this is an NE or EQ comparison of zero against the result of a
9489 signed MOD operation whose second operand is a power of 2, make
9490 the MOD operation unsigned since it is simpler and equivalent. */
9491 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
9492 && integer_zerop (arg1
)
9493 && !TYPE_UNSIGNED (TREE_TYPE (arg0
))
9494 && (TREE_CODE (arg0
) == TRUNC_MOD_EXPR
9495 || TREE_CODE (arg0
) == CEIL_MOD_EXPR
9496 || TREE_CODE (arg0
) == FLOOR_MOD_EXPR
9497 || TREE_CODE (arg0
) == ROUND_MOD_EXPR
)
9498 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
9500 tree newtype
= lang_hooks
.types
.unsigned_type (TREE_TYPE (arg0
));
9501 tree newmod
= fold_build2 (TREE_CODE (arg0
), newtype
,
9502 fold_convert (newtype
,
9503 TREE_OPERAND (arg0
, 0)),
9504 fold_convert (newtype
,
9505 TREE_OPERAND (arg0
, 1)));
9507 return fold_build2 (code
, type
, newmod
,
9508 fold_convert (newtype
, arg1
));
9511 /* If this is an NE comparison of zero with an AND of one, remove the
9512 comparison since the AND will give the correct value. */
9513 if (code
== NE_EXPR
&& integer_zerop (arg1
)
9514 && TREE_CODE (arg0
) == BIT_AND_EXPR
9515 && integer_onep (TREE_OPERAND (arg0
, 1)))
9516 return fold_convert (type
, arg0
);
9518 /* If we have (A & C) == C where C is a power of 2, convert this into
9519 (A & C) != 0. Similarly for NE_EXPR. */
9520 if ((code
== EQ_EXPR
|| code
== NE_EXPR
)
9521 && TREE_CODE (arg0
) == BIT_AND_EXPR
9522 && integer_pow2p (TREE_OPERAND (arg0
, 1))
9523 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
9524 return fold_build2 (code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
9525 arg0
, fold_convert (TREE_TYPE (arg0
),
9526 integer_zero_node
));
9528 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
9529 bit, then fold the expression into A < 0 or A >= 0. */
9530 tem
= fold_single_bit_test_into_sign_test (code
, arg0
, arg1
, type
);
9534 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
9535 Similarly for NE_EXPR. */
9536 if ((code
== EQ_EXPR
|| code
== NE_EXPR
)
9537 && TREE_CODE (arg0
) == BIT_AND_EXPR
9538 && TREE_CODE (arg1
) == INTEGER_CST
9539 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
9541 tree notc
= fold_build1 (BIT_NOT_EXPR
,
9542 TREE_TYPE (TREE_OPERAND (arg0
, 1)),
9543 TREE_OPERAND (arg0
, 1));
9544 tree dandnotc
= fold_build2 (BIT_AND_EXPR
, TREE_TYPE (arg0
),
9546 tree rslt
= code
== EQ_EXPR
? integer_zero_node
: integer_one_node
;
9547 if (integer_nonzerop (dandnotc
))
9548 return omit_one_operand (type
, rslt
, arg0
);
9551 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
9552 Similarly for NE_EXPR. */
9553 if ((code
== EQ_EXPR
|| code
== NE_EXPR
)
9554 && TREE_CODE (arg0
) == BIT_IOR_EXPR
9555 && TREE_CODE (arg1
) == INTEGER_CST
9556 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
9558 tree notd
= fold_build1 (BIT_NOT_EXPR
, TREE_TYPE (arg1
), arg1
);
9559 tree candnotd
= fold_build2 (BIT_AND_EXPR
, TREE_TYPE (arg0
),
9560 TREE_OPERAND (arg0
, 1), notd
);
9561 tree rslt
= code
== EQ_EXPR
? integer_zero_node
: integer_one_node
;
9562 if (integer_nonzerop (candnotd
))
9563 return omit_one_operand (type
, rslt
, arg0
);
9566 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
9567 and similarly for >= into !=. */
9568 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
9569 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
9570 && TREE_CODE (arg1
) == LSHIFT_EXPR
9571 && integer_onep (TREE_OPERAND (arg1
, 0)))
9572 return build2 (code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
9573 build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
9574 TREE_OPERAND (arg1
, 1)),
9575 fold_convert (TREE_TYPE (arg0
), integer_zero_node
));
9577 else if ((code
== LT_EXPR
|| code
== GE_EXPR
)
9578 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
9579 && (TREE_CODE (arg1
) == NOP_EXPR
9580 || TREE_CODE (arg1
) == CONVERT_EXPR
)
9581 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == LSHIFT_EXPR
9582 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0)))
9584 build2 (code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
9585 fold_convert (TREE_TYPE (arg0
),
9586 build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
9587 TREE_OPERAND (TREE_OPERAND (arg1
, 0),
9589 fold_convert (TREE_TYPE (arg0
), integer_zero_node
));
      /* Simplify comparison of something with itself.  (For IEEE
	 floating-point, we can only do some of these simplifications.)  */
      if (operand_equal_p (arg0, arg1, 0))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	      if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
		  || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
		return constant_boolean_node (1, type);
	      break;

	    case GE_EXPR:
	    case LE_EXPR:
	      if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
		  || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
		return constant_boolean_node (1, type);
	      return fold_build2 (EQ_EXPR, type, arg0, arg1);

	    case NE_EXPR:
	      /* For NE, we can only do this simplification if integer
		 or we don't honor IEEE floating point NaNs.  */
	      if (FLOAT_TYPE_P (TREE_TYPE (arg0))
		  && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
		break;
	      /* ... fall through ...  */
	    case GT_EXPR:
	    case LT_EXPR:
	      return constant_boolean_node (0, type);
	    default:
	      gcc_unreachable ();
	    }
	}
      /* If we are comparing an expression that just has comparisons
	 of two integer values, arithmetic expressions of those comparisons,
	 and constants, we can simplify it.  There are only three cases
	 to check: the two values can either be equal, the first can be
	 greater, or the second can be greater.  Fold the expression for
	 those three values.  Since each value must be 0 or 1, we have
	 eight possibilities, each of which corresponds to the constant 0
	 or 1 or one of the six possible comparisons.

	 This handles common cases like (a > b) == 0 but also handles
	 expressions like ((x > y) - (y > x)) > 0, which supposedly
	 occur in macroized code.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
	{
	  tree cval1 = 0, cval2 = 0;
	  int save_p = 0;

	  if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
	      /* Don't handle degenerate cases here; they should already
		 have been handled anyway.  */
	      && cval1 != 0 && cval2 != 0
	      && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
	      && TREE_TYPE (cval1) == TREE_TYPE (cval2)
	      && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
	      && TYPE_MAX_VALUE (TREE_TYPE (cval1))
	      && TYPE_MAX_VALUE (TREE_TYPE (cval2))
	      && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
				    TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
	    {
	      tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
	      tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

	      /* We can't just pass T to eval_subst in case cval1 or cval2
		 was the same as ARG1.  */

	      tree high_result
		= fold_build2 (code, type,
			       eval_subst (arg0, cval1, maxval,
					   cval2, minval),
			       arg1);
	      tree equal_result
		= fold_build2 (code, type,
			       eval_subst (arg0, cval1, maxval,
					   cval2, maxval),
			       arg1);
	      tree low_result
		= fold_build2 (code, type,
			       eval_subst (arg0, cval1, minval,
					   cval2, maxval),
			       arg1);

	      /* All three of these results should be 0 or 1.  Confirm they
		 are.  Then use those values to select the proper code
		 to use.  */

	      if ((integer_zerop (high_result)
		   || integer_onep (high_result))
		  && (integer_zerop (equal_result)
		      || integer_onep (equal_result))
		  && (integer_zerop (low_result)
		      || integer_onep (low_result)))
		{
		  /* Make a 3-bit mask with the high-order bit being the
		     value for `>', the next for '=', and the low for '<'.  */
		  switch ((integer_onep (high_result) * 4)
			  + (integer_onep (equal_result) * 2)
			  + integer_onep (low_result))
		    {
		    case 0:
		      /* Always false.  */
		      return omit_one_operand (type, integer_zero_node, arg0);
		    case 1:
		      code = LT_EXPR;
		      break;
		    case 2:
		      code = EQ_EXPR;
		      break;
		    case 3:
		      code = LE_EXPR;
		      break;
		    case 4:
		      code = GT_EXPR;
		      break;
		    case 5:
		      code = NE_EXPR;
		      break;
		    case 6:
		      code = GE_EXPR;
		      break;
		    case 7:
		      /* Always true.  */
		      return omit_one_operand (type, integer_one_node, arg0);
		    }

		  if (save_p)
		    return save_expr (build2 (code, type, cval1, cval2));
		  return fold_build2 (code, type, cval1, cval2);
		}
	    }
	}
      /* If this is a comparison of a field, we may be able to simplify it.  */
      if (((TREE_CODE (arg0) == COMPONENT_REF
	    && lang_hooks.can_use_bit_fields_p ())
	   || TREE_CODE (arg0) == BIT_FIELD_REF)
	  && (code == EQ_EXPR || code == NE_EXPR)
	  /* Handle the constant case even without -O
	     to make sure the warnings are given.  */
	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
	{
	  t1 = optimize_bit_field_compare (code, type, arg0, arg1);
	  if (t1)
	    return t1;
	}
      /* Fold a comparison of the address of COMPONENT_REFs with the same
	 type and component to a comparison of the address of the base
	 object.  In short, &x->a OP &y->a to x OP y and
	 &x->a OP &y.a to x OP &y  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
	{
	  tree cref0 = TREE_OPERAND (arg0, 0);
	  tree cref1 = TREE_OPERAND (arg1, 0);
	  if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
	    {
	      tree op0 = TREE_OPERAND (cref0, 0);
	      tree op1 = TREE_OPERAND (cref1, 0);
	      return fold_build2 (code, type,
				  build_fold_addr_expr (op0),
				  build_fold_addr_expr (op1));
	    }
	}
      /* Optimize comparisons of strlen vs zero to a compare of the
	 first character of the string vs zero.  To wit,
		strlen(ptr) == 0   =>  *ptr == 0
		strlen(ptr) != 0   =>  *ptr != 0
	 Other cases should reduce to one of these two (or a constant)
	 due to the return value of strlen being unsigned.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && integer_zerop (arg1)
	  && TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fndecl = get_callee_fndecl (arg0);
	  tree arglist;

	  if (fndecl
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
	      && (arglist = TREE_OPERAND (arg0, 1))
	      && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
	      && ! TREE_CHAIN (arglist))
	    {
	      tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
	      return fold_build2 (code, type, iref,
				  build_int_cst (TREE_TYPE (iref), 0));
	    }
	}
      /* We can fold X/C1 op C2 where C1 and C2 are integer constants
	 into a single range test.  */
      if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
	   || TREE_CODE (arg0) == EXACT_DIV_EXPR)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !integer_zerop (TREE_OPERAND (arg0, 1))
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
	  && !TREE_OVERFLOW (arg1))
	{
	  t1 = fold_div_compare (code, type, arg0, arg1);
	  if (t1 != NULL_TREE)
	    return t1;
	}
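      /* For instance, with truncating unsigned division, "x / 4 == 2" holds
	 exactly for 8 <= x <= 11, so the comparison can be rewritten as a
	 single range test on x instead of performing the division.  */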
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && integer_zerop (arg1)
	  && tree_expr_nonzero_p (arg0))
	{
	  tree res = constant_boolean_node (code == NE_EXPR, type);
	  return omit_one_operand (type, res, arg0);
	}

      t1 = fold_relational_const (code, type, arg0, arg1);
      return t1 == NULL_TREE ? NULL_TREE : t1;
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
	{
	  t1 = fold_relational_const (code, type, arg0, arg1);
	  if (t1 != NULL_TREE)
	    return t1;
	}

      /* If the first operand is NaN, the result is constant.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand (type, t1, arg1);
	}

      /* If the second operand is NaN, the result is constant.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand (type, t1, arg0);
	}

      /* Simplify unordered comparison of something with itself.  */
      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (1, type);

      if (code == LTGT_EXPR
	  && !flag_trapping_math
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (0, type);
      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
	tree targ0 = strip_float_extensions (arg0);
	tree targ1 = strip_float_extensions (arg1);
	tree newtype = TREE_TYPE (targ0);

	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	  newtype = TREE_TYPE (targ1);

	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	  return fold_build2 (code, type, fold_convert (newtype, targ0),
			      fold_convert (newtype, targ1));
      }
    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
	 nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
	return NULL_TREE;
      /* Don't let (0, 0) be null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
				 : fold_convert (type, arg1);
      return pedantic_non_lvalue (tem);

    case COMPLEX_EXPR:
	return build_complex (type, arg0, arg1);

    case ASSERT_EXPR:
      /* An ASSERT_EXPR should never be passed to fold_binary.  */
      gcc_unreachable ();

    } /* switch (code) */
/* Callback for walk_tree, looking for LABEL_EXPR.
   Returns tree TP if it is LABEL_EXPR.  Otherwise it returns NULL_TREE.
   Do not check the sub-tree of GOTO_EXPR.  */

static tree
contains_label_1 (tree *tp,
		  int *walk_subtrees,
		  void *data ATTRIBUTE_UNUSED)
{
  switch (TREE_CODE (*tp))
    {
    case LABEL_EXPR:
      return *tp;
    case GOTO_EXPR:
      *walk_subtrees = 0;
      /* no break */
    default:
      return NULL_TREE;
    }
}

/* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
   accessible from outside the sub-tree.  Returns NULL_TREE if no
   addressable label is found.  */

static bool
contains_label_p (tree st)
{
  return (walk_tree (&st, contains_label_1, NULL, NULL) != NULL_TREE);
}
/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, and OP2.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
{
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 3);

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any cases,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */
9972 if (TREE_CODE (arg0
) == CONSTRUCTOR
9973 && ! type_contains_placeholder_p (TREE_TYPE (arg0
)))
9975 unsigned HOST_WIDE_INT idx
;
9977 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0
), idx
, field
, value
)
9984 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
9985 so all simple results must be passed through pedantic_non_lvalue. */
9986 if (TREE_CODE (arg0
) == INTEGER_CST
)
9988 tree unused_op
= integer_zerop (arg0
) ? op1
: op2
;
9989 tem
= integer_zerop (arg0
) ? op2
: op1
;
9990 /* Only optimize constant conditions when the selected branch
9991 has the same type as the COND_EXPR. This avoids optimizing
9992 away "c ? x : throw", where the throw has a void type.
9993 Avoid throwing away that operand which contains label. */
9994 if ((!TREE_SIDE_EFFECTS (unused_op
)
9995 || !contains_label_p (unused_op
))
9996 && (! VOID_TYPE_P (TREE_TYPE (tem
))
9997 || VOID_TYPE_P (type
)))
9998 return pedantic_non_lvalue (tem
);
10001 if (operand_equal_p (arg1
, op2
, 0))
10002 return pedantic_omit_one_operand (type
, arg1
, arg0
);
10004 /* If we have A op B ? A : C, we may be able to convert this to a
10005 simpler expression, depending on the operation and the values
10006 of B and C. Signed zeros prevent all of these transformations,
10007 for reasons given above each one.
10009 Also try swapping the arguments and inverting the conditional. */
10010 if (COMPARISON_CLASS_P (arg0
)
10011 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
10012 arg1
, TREE_OPERAND (arg0
, 1))
10013 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1
))))
10015 tem
= fold_cond_expr_with_comparison (type
, arg0
, op1
, op2
);
10020 if (COMPARISON_CLASS_P (arg0
)
10021 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
10023 TREE_OPERAND (arg0
, 1))
10024 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2
))))
10026 tem
= invert_truthvalue (arg0
);
10027 if (COMPARISON_CLASS_P (tem
))
10029 tem
= fold_cond_expr_with_comparison (type
, tem
, op2
, op1
);
10035 /* If the second operand is simpler than the third, swap them
10036 since that produces better jump optimization results. */
10037 if (truth_value_p (TREE_CODE (arg0
))
10038 && tree_swap_operands_p (op1
, op2
, false))
10040 /* See if this can be inverted. If it can't, possibly because
10041 it was a floating-point inequality comparison, don't do
10043 tem
= invert_truthvalue (arg0
);
10045 if (TREE_CODE (tem
) != TRUTH_NOT_EXPR
)
10046 return fold_build3 (code
, type
, tem
, op2
, op1
);
10049 /* Convert A ? 1 : 0 to simply A. */
10050 if (integer_onep (op1
)
10051 && integer_zerop (op2
)
10052 /* If we try to convert OP0 to our type, the
10053 call to fold will try to move the conversion inside
10054 a COND, which will recurse. In that case, the COND_EXPR
10055 is probably the best choice, so leave it alone. */
10056 && type
== TREE_TYPE (arg0
))
10057 return pedantic_non_lvalue (arg0
);
10059 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
10060 over COND_EXPR in cases such as floating point comparisons. */
10061 if (integer_zerop (op1
)
10062 && integer_onep (op2
)
10063 && truth_value_p (TREE_CODE (arg0
)))
10064 return pedantic_non_lvalue (fold_convert (type
,
10065 invert_truthvalue (arg0
)));
10067 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
10068 if (TREE_CODE (arg0
) == LT_EXPR
10069 && integer_zerop (TREE_OPERAND (arg0
, 1))
10070 && integer_zerop (op2
)
10071 && (tem
= sign_bit_p (TREE_OPERAND (arg0
, 0), arg1
)))
10072 return fold_convert (type
, fold_build2 (BIT_AND_EXPR
,
10073 TREE_TYPE (tem
), tem
, arg1
));
10075 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
10076 already handled above. */
10077 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10078 && integer_onep (TREE_OPERAND (arg0
, 1))
10079 && integer_zerop (op2
)
10080 && integer_pow2p (arg1
))
10082 tree tem
= TREE_OPERAND (arg0
, 0);
10084 if (TREE_CODE (tem
) == RSHIFT_EXPR
10085 && TREE_CODE (TREE_OPERAND (tem
, 1)) == INTEGER_CST
10086 && (unsigned HOST_WIDE_INT
) tree_log2 (arg1
) ==
10087 TREE_INT_CST_LOW (TREE_OPERAND (tem
, 1)))
10088 return fold_build2 (BIT_AND_EXPR
, type
,
10089 TREE_OPERAND (tem
, 0), arg1
);
10092 /* A & N ? N : 0 is simply A & N if N is a power of two. This
10093 is probably obsolete because the first operand should be a
10094 truth value (that's why we have the two cases above), but let's
10095 leave it in until we can confirm this for all front-ends. */
10096 if (integer_zerop (op2
)
10097 && TREE_CODE (arg0
) == NE_EXPR
10098 && integer_zerop (TREE_OPERAND (arg0
, 1))
10099 && integer_pow2p (arg1
)
10100 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
10101 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
10102 arg1
, OEP_ONLY_CONST
))
10103 return pedantic_non_lvalue (fold_convert (type
,
10104 TREE_OPERAND (arg0
, 0)));
10106 /* Convert A ? B : 0 into A && B if A and B are truth values. */
10107 if (integer_zerop (op2
)
10108 && truth_value_p (TREE_CODE (arg0
))
10109 && truth_value_p (TREE_CODE (arg1
)))
10110 return fold_build2 (TRUTH_ANDIF_EXPR
, type
, arg0
, arg1
);
10112 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
10113 if (integer_onep (op2
)
10114 && truth_value_p (TREE_CODE (arg0
))
10115 && truth_value_p (TREE_CODE (arg1
)))
10117 /* Only perform transformation if ARG0 is easily inverted. */
10118 tem
= invert_truthvalue (arg0
);
10119 if (TREE_CODE (tem
) != TRUTH_NOT_EXPR
)
10120 return fold_build2 (TRUTH_ORIF_EXPR
, type
, tem
, arg1
);
10123 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
10124 if (integer_zerop (arg1
)
10125 && truth_value_p (TREE_CODE (arg0
))
10126 && truth_value_p (TREE_CODE (op2
)))
10128 /* Only perform transformation if ARG0 is easily inverted. */
10129 tem
= invert_truthvalue (arg0
);
10130 if (TREE_CODE (tem
) != TRUTH_NOT_EXPR
)
10131 return fold_build2 (TRUTH_ANDIF_EXPR
, type
, tem
, op2
);
10134 /* Convert A ? 1 : B into A || B if A and B are truth values. */
10135 if (integer_onep (arg1
)
10136 && truth_value_p (TREE_CODE (arg0
))
10137 && truth_value_p (TREE_CODE (op2
)))
10138 return fold_build2 (TRUTH_ORIF_EXPR
, type
, arg0
, op2
);
10143 /* Check for a built-in function. */
10144 if (TREE_CODE (op0
) == ADDR_EXPR
10145 && TREE_CODE (TREE_OPERAND (op0
, 0)) == FUNCTION_DECL
10146 && DECL_BUILT_IN (TREE_OPERAND (op0
, 0)))
10147 return fold_builtin (TREE_OPERAND (op0
, 0), op1
, false);
10150 case BIT_FIELD_REF
:
10151 if (TREE_CODE (arg0
) == VECTOR_CST
10152 && type
== TREE_TYPE (TREE_TYPE (arg0
))
10153 && host_integerp (arg1
, 1)
10154 && host_integerp (op2
, 1))
10156 unsigned HOST_WIDE_INT width
= tree_low_cst (arg1
, 1);
10157 unsigned HOST_WIDE_INT idx
= tree_low_cst (op2
, 1);
10160 && simple_cst_equal (arg1
, TYPE_SIZE (type
)) == 1
10161 && (idx
% width
) == 0
10162 && (idx
= idx
/ width
)
10163 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)))
10165 tree elements
= TREE_VECTOR_CST_ELTS (arg0
);
10166 while (idx
-- > 0 && elements
)
10167 elements
= TREE_CHAIN (elements
);
10169 return TREE_VALUE (elements
);
10171 return fold_convert (type
, integer_zero_node
);
10178 } /* switch (code) */
10181 /* Perform constant folding and related simplification of EXPR.
10182 The related simplifications include x*1 => x, x*0 => 0, etc.,
10183 and application of the associative law.
10184 NOP_EXPR conversions may be removed freely (as long as we
10185 are careful not to change the type of the overall expression).
10186 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
10187 but we can constant-fold them if they have constant operands. */
10189 #ifdef ENABLE_FOLD_CHECKING
10190 # define fold(x) fold_1 (x)
10191 static tree
fold_1 (tree
);
10197 const tree t
= expr
;
10198 enum tree_code code
= TREE_CODE (t
);
10199 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
10202 /* Return right away if a constant. */
10203 if (kind
== tcc_constant
)
10206 if (IS_EXPR_CODE_CLASS (kind
))
10208 tree type
= TREE_TYPE (t
);
10209 tree op0
, op1
, op2
;
10211 switch (TREE_CODE_LENGTH (code
))
10214 op0
= TREE_OPERAND (t
, 0);
10215 tem
= fold_unary (code
, type
, op0
);
10216 return tem
? tem
: expr
;
10218 op0
= TREE_OPERAND (t
, 0);
10219 op1
= TREE_OPERAND (t
, 1);
10220 tem
= fold_binary (code
, type
, op0
, op1
);
10221 return tem
? tem
: expr
;
10223 op0
= TREE_OPERAND (t
, 0);
10224 op1
= TREE_OPERAND (t
, 1);
10225 op2
= TREE_OPERAND (t
, 2);
10226 tem
= fold_ternary (code
, type
, op0
, op1
, op2
);
10227 return tem
? tem
: expr
;
10236 return fold (DECL_INITIAL (t
));
10240 } /* switch (code) */
10243 #ifdef ENABLE_FOLD_CHECKING
10246 static void fold_checksum_tree (tree
, struct md5_ctx
*, htab_t
);
10247 static void fold_check_failed (tree
, tree
);
10248 void print_fold_checksum (tree
);
10250 /* When --enable-checking=fold, compute a digest of expr before
10251 and after actual fold call to see if fold did not accidentally
10252 change original expr. */
10258 struct md5_ctx ctx
;
10259 unsigned char checksum_before
[16], checksum_after
[16];
10262 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
10263 md5_init_ctx (&ctx
);
10264 fold_checksum_tree (expr
, &ctx
, ht
);
10265 md5_finish_ctx (&ctx
, checksum_before
);
10268 ret
= fold_1 (expr
);
10270 md5_init_ctx (&ctx
);
10271 fold_checksum_tree (expr
, &ctx
, ht
);
10272 md5_finish_ctx (&ctx
, checksum_after
);
10275 if (memcmp (checksum_before
, checksum_after
, 16))
10276 fold_check_failed (expr
, ret
);
10282 print_fold_checksum (tree expr
)
10284 struct md5_ctx ctx
;
10285 unsigned char checksum
[16], cnt
;
10288 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
10289 md5_init_ctx (&ctx
);
10290 fold_checksum_tree (expr
, &ctx
, ht
);
10291 md5_finish_ctx (&ctx
, checksum
);
10293 for (cnt
= 0; cnt
< 16; ++cnt
)
10294 fprintf (stderr
, "%02x", checksum
[cnt
]);
10295 putc ('\n', stderr
);
10299 fold_check_failed (tree expr ATTRIBUTE_UNUSED
, tree ret ATTRIBUTE_UNUSED
)
10301 internal_error ("fold check: original tree changed by fold");
10305 fold_checksum_tree (tree expr
, struct md5_ctx
*ctx
, htab_t ht
)
10308 enum tree_code code
;
10309 char buf
[sizeof (struct tree_function_decl
)];
10314 gcc_assert ((sizeof (struct tree_exp
) + 5 * sizeof (tree
)
10315 <= sizeof (struct tree_function_decl
))
10316 && sizeof (struct tree_type
) <= sizeof (struct tree_function_decl
));
10319 slot
= htab_find_slot (ht
, expr
, INSERT
);
10323 code
= TREE_CODE (expr
);
10324 if (TREE_CODE_CLASS (code
) == tcc_declaration
10325 && DECL_ASSEMBLER_NAME_SET_P (expr
))
10327 /* Allow DECL_ASSEMBLER_NAME to be modified. */
10328 memcpy (buf
, expr
, tree_size (expr
));
10330 SET_DECL_ASSEMBLER_NAME (expr
, NULL
);
10332 else if (TREE_CODE_CLASS (code
) == tcc_type
10333 && (TYPE_POINTER_TO (expr
) || TYPE_REFERENCE_TO (expr
)
10334 || TYPE_CACHED_VALUES_P (expr
)
10335 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr
)))
10337 /* Allow these fields to be modified. */
10338 memcpy (buf
, expr
, tree_size (expr
));
10340 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr
) = 0;
10341 TYPE_POINTER_TO (expr
) = NULL
;
10342 TYPE_REFERENCE_TO (expr
) = NULL
;
10343 if (TYPE_CACHED_VALUES_P (expr
))
10345 TYPE_CACHED_VALUES_P (expr
) = 0;
10346 TYPE_CACHED_VALUES (expr
) = NULL
;
10349 md5_process_bytes (expr
, tree_size (expr
), ctx
);
10350 fold_checksum_tree (TREE_TYPE (expr
), ctx
, ht
);
10351 if (TREE_CODE_CLASS (code
) != tcc_type
10352 && TREE_CODE_CLASS (code
) != tcc_declaration
10353 && code
!= TREE_LIST
)
10354 fold_checksum_tree (TREE_CHAIN (expr
), ctx
, ht
);
10355 switch (TREE_CODE_CLASS (code
))
10361 md5_process_bytes (TREE_STRING_POINTER (expr
),
10362 TREE_STRING_LENGTH (expr
), ctx
);
10365 fold_checksum_tree (TREE_REALPART (expr
), ctx
, ht
);
10366 fold_checksum_tree (TREE_IMAGPART (expr
), ctx
, ht
);
10369 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr
), ctx
, ht
);
10375 case tcc_exceptional
:
10379 fold_checksum_tree (TREE_PURPOSE (expr
), ctx
, ht
);
10380 fold_checksum_tree (TREE_VALUE (expr
), ctx
, ht
);
10381 expr
= TREE_CHAIN (expr
);
10382 goto recursive_label
;
10385 for (i
= 0; i
< TREE_VEC_LENGTH (expr
); ++i
)
10386 fold_checksum_tree (TREE_VEC_ELT (expr
, i
), ctx
, ht
);
10392 case tcc_expression
:
10393 case tcc_reference
:
10394 case tcc_comparison
:
10397 case tcc_statement
:
10398 len
= TREE_CODE_LENGTH (code
);
10399 for (i
= 0; i
< len
; ++i
)
10400 fold_checksum_tree (TREE_OPERAND (expr
, i
), ctx
, ht
);
10402 case tcc_declaration
:
10403 fold_checksum_tree (DECL_SIZE (expr
), ctx
, ht
);
10404 fold_checksum_tree (DECL_SIZE_UNIT (expr
), ctx
, ht
);
10405 fold_checksum_tree (DECL_NAME (expr
), ctx
, ht
);
10406 fold_checksum_tree (DECL_CONTEXT (expr
), ctx
, ht
);
10407 fold_checksum_tree (DECL_INITIAL (expr
), ctx
, ht
);
10408 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr
), ctx
, ht
);
10409 fold_checksum_tree (DECL_ATTRIBUTES (expr
), ctx
, ht
);
10410 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_WITH_VIS
))
10411 fold_checksum_tree (DECL_SECTION_NAME (expr
), ctx
, ht
);
10413 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_NON_COMMON
))
10415 fold_checksum_tree (DECL_VINDEX (expr
), ctx
, ht
);
10416 fold_checksum_tree (DECL_RESULT_FLD (expr
), ctx
, ht
);
10417 fold_checksum_tree (DECL_ARGUMENT_FLD (expr
), ctx
, ht
);
10421 if (TREE_CODE (expr
) == ENUMERAL_TYPE
)
10422 fold_checksum_tree (TYPE_VALUES (expr
), ctx
, ht
);
10423 fold_checksum_tree (TYPE_SIZE (expr
), ctx
, ht
);
10424 fold_checksum_tree (TYPE_SIZE_UNIT (expr
), ctx
, ht
);
10425 fold_checksum_tree (TYPE_ATTRIBUTES (expr
), ctx
, ht
);
10426 fold_checksum_tree (TYPE_NAME (expr
), ctx
, ht
);
10427 if (INTEGRAL_TYPE_P (expr
)
10428 || SCALAR_FLOAT_TYPE_P (expr
))
10430 fold_checksum_tree (TYPE_MIN_VALUE (expr
), ctx
, ht
);
10431 fold_checksum_tree (TYPE_MAX_VALUE (expr
), ctx
, ht
);
10433 fold_checksum_tree (TYPE_MAIN_VARIANT (expr
), ctx
, ht
);
10434 if (TREE_CODE (expr
) == RECORD_TYPE
10435 || TREE_CODE (expr
) == UNION_TYPE
10436 || TREE_CODE (expr
) == QUAL_UNION_TYPE
)
10437 fold_checksum_tree (TYPE_BINFO (expr
), ctx
, ht
);
10438 fold_checksum_tree (TYPE_CONTEXT (expr
), ctx
, ht
);
10447 /* Fold a unary tree expression with code CODE of type TYPE with an
10448 operand OP0. Return a folded expression if successful. Otherwise,
10449 return a tree expression with code CODE of type TYPE with an
10453 fold_build1_stat (enum tree_code code
, tree type
, tree op0 MEM_STAT_DECL
)
10456 #ifdef ENABLE_FOLD_CHECKING
10457 unsigned char checksum_before
[16], checksum_after
[16];
10458 struct md5_ctx ctx
;
10461 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
10462 md5_init_ctx (&ctx
);
10463 fold_checksum_tree (op0
, &ctx
, ht
);
10464 md5_finish_ctx (&ctx
, checksum_before
);
10468 tem
= fold_unary (code
, type
, op0
);
10470 tem
= build1_stat (code
, type
, op0 PASS_MEM_STAT
);
10472 #ifdef ENABLE_FOLD_CHECKING
10473 md5_init_ctx (&ctx
);
10474 fold_checksum_tree (op0
, &ctx
, ht
);
10475 md5_finish_ctx (&ctx
, checksum_after
);
10478 if (memcmp (checksum_before
, checksum_after
, 16))
10479 fold_check_failed (op0
, tem
);
10484 /* Fold a binary tree expression with code CODE of type TYPE with
10485 operands OP0 and OP1. Return a folded expression if successful.
10486 Otherwise, return a tree expression with code CODE of type TYPE
10487 with operands OP0 and OP1. */
10490 fold_build2_stat (enum tree_code code
, tree type
, tree op0
, tree op1
10494 #ifdef ENABLE_FOLD_CHECKING
10495 unsigned char checksum_before_op0
[16],
10496 checksum_before_op1
[16],
10497 checksum_after_op0
[16],
10498 checksum_after_op1
[16];
10499 struct md5_ctx ctx
;
10502 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
10503 md5_init_ctx (&ctx
);
10504 fold_checksum_tree (op0
, &ctx
, ht
);
10505 md5_finish_ctx (&ctx
, checksum_before_op0
);
10508 md5_init_ctx (&ctx
);
10509 fold_checksum_tree (op1
, &ctx
, ht
);
10510 md5_finish_ctx (&ctx
, checksum_before_op1
);
10514 tem
= fold_binary (code
, type
, op0
, op1
);
10516 tem
= build2_stat (code
, type
, op0
, op1 PASS_MEM_STAT
);
10518 #ifdef ENABLE_FOLD_CHECKING
10519 md5_init_ctx (&ctx
);
10520 fold_checksum_tree (op0
, &ctx
, ht
);
10521 md5_finish_ctx (&ctx
, checksum_after_op0
);
10524 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
10525 fold_check_failed (op0
, tem
);
10527 md5_init_ctx (&ctx
);
10528 fold_checksum_tree (op1
, &ctx
, ht
);
10529 md5_finish_ctx (&ctx
, checksum_after_op1
);
10532 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
10533 fold_check_failed (op1
, tem
);
10538 /* Fold a ternary tree expression with code CODE of type TYPE with
10539 operands OP0, OP1, and OP2. Return a folded expression if
10540 successful. Otherwise, return a tree expression with code CODE of
10541 type TYPE with operands OP0, OP1, and OP2. */
10544 fold_build3_stat (enum tree_code code
, tree type
, tree op0
, tree op1
, tree op2
10548 #ifdef ENABLE_FOLD_CHECKING
10549 unsigned char checksum_before_op0
[16],
10550 checksum_before_op1
[16],
10551 checksum_before_op2
[16],
10552 checksum_after_op0
[16],
10553 checksum_after_op1
[16],
10554 checksum_after_op2
[16];
10555 struct md5_ctx ctx
;
10558 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
10559 md5_init_ctx (&ctx
);
10560 fold_checksum_tree (op0
, &ctx
, ht
);
10561 md5_finish_ctx (&ctx
, checksum_before_op0
);
10564 md5_init_ctx (&ctx
);
10565 fold_checksum_tree (op1
, &ctx
, ht
);
10566 md5_finish_ctx (&ctx
, checksum_before_op1
);
10569 md5_init_ctx (&ctx
);
10570 fold_checksum_tree (op2
, &ctx
, ht
);
10571 md5_finish_ctx (&ctx
, checksum_before_op2
);
10575 tem
= fold_ternary (code
, type
, op0
, op1
, op2
);
10577 tem
= build3_stat (code
, type
, op0
, op1
, op2 PASS_MEM_STAT
);
10579 #ifdef ENABLE_FOLD_CHECKING
10580 md5_init_ctx (&ctx
);
10581 fold_checksum_tree (op0
, &ctx
, ht
);
10582 md5_finish_ctx (&ctx
, checksum_after_op0
);
10585 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
10586 fold_check_failed (op0
, tem
);
10588 md5_init_ctx (&ctx
);
10589 fold_checksum_tree (op1
, &ctx
, ht
);
10590 md5_finish_ctx (&ctx
, checksum_after_op1
);
10593 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
10594 fold_check_failed (op1
, tem
);
10596 md5_init_ctx (&ctx
);
10597 fold_checksum_tree (op2
, &ctx
, ht
);
10598 md5_finish_ctx (&ctx
, checksum_after_op2
);
10601 if (memcmp (checksum_before_op2
, checksum_after_op2
, 16))
10602 fold_check_failed (op2
, tem
);
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */
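/* For example, with -ftrapping-math in effect fold normally refuses to
   evaluate a floating-point operation that could raise an exception at run
   time; a static initializer must be computed at compile time regardless, so
   the _initializer variants temporarily clear those flags (via
   START_FOLD_INIT/END_FOLD_INIT below) around the ordinary fold_buildN
   calls.  */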
#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv
10628 fold_build1_initializer (enum tree_code code
, tree type
, tree op
)
10633 result
= fold_build1 (code
, type
, op
);
10640 fold_build2_initializer (enum tree_code code
, tree type
, tree op0
, tree op1
)
10645 result
= fold_build2 (code
, type
, op0
, op1
);
10652 fold_build3_initializer (enum tree_code code
, tree type
, tree op0
, tree op1
,
10658 result
= fold_build3 (code
, type
, op0
, op1
, op2
);
10664 #undef START_FOLD_INIT
10665 #undef END_FOLD_INIT
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */
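/* As an illustration, asking whether SAVE_EXPR (J * 8) is a multiple of
   BOTTOM = 8 succeeds because the routine looks through the SAVE_EXPR,
   reaches the multiplication, and finds that its constant operand 8 is itself
   a multiple of 8; no assumption about the value of J is needed.  */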
10708 multiple_of_p (tree type
, tree top
, tree bottom
)
10710 if (operand_equal_p (top
, bottom
, 0))
10713 if (TREE_CODE (type
) != INTEGER_TYPE
)
10716 switch (TREE_CODE (top
))
10719 /* Bitwise and provides a power of two multiple. If the mask is
10720 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
10721 if (!integer_pow2p (bottom
))
10726 return (multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
)
10727 || multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
));
10731 return (multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
)
10732 && multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
));
10735 if (TREE_CODE (TREE_OPERAND (top
, 1)) == INTEGER_CST
)
10739 op1
= TREE_OPERAND (top
, 1);
10740 /* const_binop may not detect overflow correctly,
10741 so check for it explicitly here. */
10742 if (TYPE_PRECISION (TREE_TYPE (size_one_node
))
10743 > TREE_INT_CST_LOW (op1
)
10744 && TREE_INT_CST_HIGH (op1
) == 0
10745 && 0 != (t1
= fold_convert (type
,
10746 const_binop (LSHIFT_EXPR
,
10749 && ! TREE_OVERFLOW (t1
))
10750 return multiple_of_p (type
, t1
, bottom
);
10755 /* Can't handle conversions from non-integral or wider integral type. */
10756 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top
, 0))) != INTEGER_TYPE
)
10757 || (TYPE_PRECISION (type
)
10758 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top
, 0)))))
10761 /* .. fall through ... */
10764 return multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
);
10767 if (TREE_CODE (bottom
) != INTEGER_CST
10768 || (TYPE_UNSIGNED (type
)
10769 && (tree_int_cst_sgn (top
) < 0
10770 || tree_int_cst_sgn (bottom
) < 0)))
10772 return integer_zerop (const_binop (TRUNC_MOD_EXPR
,
10780 /* Return true if `t' is known to be non-negative. */
10783 tree_expr_nonnegative_p (tree t
)
10785 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
10788 switch (TREE_CODE (t
))
10791 /* We can't return 1 if flag_wrapv is set because
10792 ABS_EXPR<INT_MIN> = INT_MIN. */
10793 if (!(flag_wrapv
&& INTEGRAL_TYPE_P (TREE_TYPE (t
))))
10798 return tree_int_cst_sgn (t
) >= 0;
10801 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t
));
10804 if (FLOAT_TYPE_P (TREE_TYPE (t
)))
10805 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 0))
10806 && tree_expr_nonnegative_p (TREE_OPERAND (t
, 1));
      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
	 both unsigned and at least 2 bits shorter than the result.  */
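      /* E.g. two 8-bit unsigned values extended to a 32-bit signed result:
	 their sum fits in 9 bits, so the sign bit of the result can never be
	 set and the PLUS_EXPR is known to be non-negative.  */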
10810 if (TREE_CODE (TREE_TYPE (t
)) == INTEGER_TYPE
10811 && TREE_CODE (TREE_OPERAND (t
, 0)) == NOP_EXPR
10812 && TREE_CODE (TREE_OPERAND (t
, 1)) == NOP_EXPR
)
10814 tree inner1
= TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t
, 0), 0));
10815 tree inner2
= TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t
, 1), 0));
10816 if (TREE_CODE (inner1
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner1
)
10817 && TREE_CODE (inner2
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner2
))
10819 unsigned int prec
= MAX (TYPE_PRECISION (inner1
),
10820 TYPE_PRECISION (inner2
)) + 1;
10821 return prec
< TYPE_PRECISION (TREE_TYPE (t
));
10827 if (FLOAT_TYPE_P (TREE_TYPE (t
)))
10829 /* x * x for floating point x is always non-negative. */
10830 if (operand_equal_p (TREE_OPERAND (t
, 0), TREE_OPERAND (t
, 1), 0))
10832 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 0))
10833 && tree_expr_nonnegative_p (TREE_OPERAND (t
, 1));
10836 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
10837 both unsigned and their total bits is shorter than the result. */
10838 if (TREE_CODE (TREE_TYPE (t
)) == INTEGER_TYPE
10839 && TREE_CODE (TREE_OPERAND (t
, 0)) == NOP_EXPR
10840 && TREE_CODE (TREE_OPERAND (t
, 1)) == NOP_EXPR
)
10842 tree inner1
= TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t
, 0), 0));
10843 tree inner2
= TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t
, 1), 0));
10844 if (TREE_CODE (inner1
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner1
)
10845 && TREE_CODE (inner2
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner2
))
10846 return TYPE_PRECISION (inner1
) + TYPE_PRECISION (inner2
)
10847 < TYPE_PRECISION (TREE_TYPE (t
));
10853 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 0))
10854 || tree_expr_nonnegative_p (TREE_OPERAND (t
, 1));
10860 case TRUNC_DIV_EXPR
:
10861 case CEIL_DIV_EXPR
:
10862 case FLOOR_DIV_EXPR
:
10863 case ROUND_DIV_EXPR
:
10864 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 0))
10865 && tree_expr_nonnegative_p (TREE_OPERAND (t
, 1));
10867 case TRUNC_MOD_EXPR
:
10868 case CEIL_MOD_EXPR
:
10869 case FLOOR_MOD_EXPR
:
10870 case ROUND_MOD_EXPR
:
10872 case NON_LVALUE_EXPR
:
10874 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 0));
10876 case COMPOUND_EXPR
:
10878 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 1));
10881 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t
, 1)));
10884 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 1))
10885 && tree_expr_nonnegative_p (TREE_OPERAND (t
, 2));
10889 tree inner_type
= TREE_TYPE (TREE_OPERAND (t
, 0));
10890 tree outer_type
= TREE_TYPE (t
);
10892 if (TREE_CODE (outer_type
) == REAL_TYPE
)
10894 if (TREE_CODE (inner_type
) == REAL_TYPE
)
10895 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 0));
10896 if (TREE_CODE (inner_type
) == INTEGER_TYPE
)
10898 if (TYPE_UNSIGNED (inner_type
))
10900 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 0));
10903 else if (TREE_CODE (outer_type
) == INTEGER_TYPE
)
10905 if (TREE_CODE (inner_type
) == REAL_TYPE
)
10906 return tree_expr_nonnegative_p (TREE_OPERAND (t
,0));
10907 if (TREE_CODE (inner_type
) == INTEGER_TYPE
)
10908 return TYPE_PRECISION (inner_type
) < TYPE_PRECISION (outer_type
)
10909 && TYPE_UNSIGNED (inner_type
);
10916 tree temp
= TARGET_EXPR_SLOT (t
);
10917 t
= TARGET_EXPR_INITIAL (t
);
10919 /* If the initializer is non-void, then it's a normal expression
10920 that will be assigned to the slot. */
10921 if (!VOID_TYPE_P (t
))
10922 return tree_expr_nonnegative_p (t
);
10924 /* Otherwise, the initializer sets the slot in some way. One common
10925 way is an assignment statement at the end of the initializer. */
10928 if (TREE_CODE (t
) == BIND_EXPR
)
10929 t
= expr_last (BIND_EXPR_BODY (t
));
10930 else if (TREE_CODE (t
) == TRY_FINALLY_EXPR
10931 || TREE_CODE (t
) == TRY_CATCH_EXPR
)
10932 t
= expr_last (TREE_OPERAND (t
, 0));
10933 else if (TREE_CODE (t
) == STATEMENT_LIST
)
10938 if (TREE_CODE (t
) == MODIFY_EXPR
10939 && TREE_OPERAND (t
, 0) == temp
)
10940 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 1));
10947 tree fndecl
= get_callee_fndecl (t
);
10948 tree arglist
= TREE_OPERAND (t
, 1);
10949 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
10950 switch (DECL_FUNCTION_CODE (fndecl
))
10952 #define CASE_BUILTIN_F(BUILT_IN_FN) \
10953 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
10954 #define CASE_BUILTIN_I(BUILT_IN_FN) \
10955 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
10957 CASE_BUILTIN_F (BUILT_IN_ACOS
)
10958 CASE_BUILTIN_F (BUILT_IN_ACOSH
)
10959 CASE_BUILTIN_F (BUILT_IN_CABS
)
10960 CASE_BUILTIN_F (BUILT_IN_COSH
)
10961 CASE_BUILTIN_F (BUILT_IN_ERFC
)
10962 CASE_BUILTIN_F (BUILT_IN_EXP
)
10963 CASE_BUILTIN_F (BUILT_IN_EXP10
)
10964 CASE_BUILTIN_F (BUILT_IN_EXP2
)
10965 CASE_BUILTIN_F (BUILT_IN_FABS
)
10966 CASE_BUILTIN_F (BUILT_IN_FDIM
)
10967 CASE_BUILTIN_F (BUILT_IN_HYPOT
)
10968 CASE_BUILTIN_F (BUILT_IN_POW10
)
10969 CASE_BUILTIN_I (BUILT_IN_FFS
)
10970 CASE_BUILTIN_I (BUILT_IN_PARITY
)
10971 CASE_BUILTIN_I (BUILT_IN_POPCOUNT
)
10975 CASE_BUILTIN_F (BUILT_IN_SQRT
)
10976 /* sqrt(-0.0) is -0.0. */
10977 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t
))))
10979 return tree_expr_nonnegative_p (TREE_VALUE (arglist
));
10981 CASE_BUILTIN_F (BUILT_IN_ASINH
)
10982 CASE_BUILTIN_F (BUILT_IN_ATAN
)
10983 CASE_BUILTIN_F (BUILT_IN_ATANH
)
10984 CASE_BUILTIN_F (BUILT_IN_CBRT
)
10985 CASE_BUILTIN_F (BUILT_IN_CEIL
)
10986 CASE_BUILTIN_F (BUILT_IN_ERF
)
10987 CASE_BUILTIN_F (BUILT_IN_EXPM1
)
10988 CASE_BUILTIN_F (BUILT_IN_FLOOR
)
10989 CASE_BUILTIN_F (BUILT_IN_FMOD
)
10990 CASE_BUILTIN_F (BUILT_IN_FREXP
)
10991 CASE_BUILTIN_F (BUILT_IN_LCEIL
)
10992 CASE_BUILTIN_F (BUILT_IN_LDEXP
)
10993 CASE_BUILTIN_F (BUILT_IN_LFLOOR
)
10994 CASE_BUILTIN_F (BUILT_IN_LLCEIL
)
10995 CASE_BUILTIN_F (BUILT_IN_LLFLOOR
)
10996 CASE_BUILTIN_F (BUILT_IN_LLRINT
)
10997 CASE_BUILTIN_F (BUILT_IN_LLROUND
)
10998 CASE_BUILTIN_F (BUILT_IN_LRINT
)
10999 CASE_BUILTIN_F (BUILT_IN_LROUND
)
11000 CASE_BUILTIN_F (BUILT_IN_MODF
)
11001 CASE_BUILTIN_F (BUILT_IN_NEARBYINT
)
11002 CASE_BUILTIN_F (BUILT_IN_POW
)
11003 CASE_BUILTIN_F (BUILT_IN_RINT
)
11004 CASE_BUILTIN_F (BUILT_IN_ROUND
)
11005 CASE_BUILTIN_F (BUILT_IN_SIGNBIT
)
11006 CASE_BUILTIN_F (BUILT_IN_SINH
)
11007 CASE_BUILTIN_F (BUILT_IN_TANH
)
11008 CASE_BUILTIN_F (BUILT_IN_TRUNC
)
11009 /* True if the 1st argument is nonnegative. */
11010 return tree_expr_nonnegative_p (TREE_VALUE (arglist
));
11012 CASE_BUILTIN_F (BUILT_IN_FMAX
)
11013 /* True if the 1st OR 2nd arguments are nonnegative. */
11014 return tree_expr_nonnegative_p (TREE_VALUE (arglist
))
11015 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist
)));
11017 CASE_BUILTIN_F (BUILT_IN_FMIN
)
11018 /* True if the 1st AND 2nd arguments are nonnegative. */
11019 return tree_expr_nonnegative_p (TREE_VALUE (arglist
))
11020 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist
)));
11022 CASE_BUILTIN_F (BUILT_IN_COPYSIGN
)
11023 /* True if the 2nd argument is nonnegative. */
11024 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist
)));
11028 #undef CASE_BUILTIN_F
11029 #undef CASE_BUILTIN_I
11033 /* ... fall through ... */
11036 if (truth_value_p (TREE_CODE (t
)))
11037 /* Truth values evaluate to 0 or 1, which is nonnegative. */
11041 /* We don't know sign of `t', so be conservative and return false. */
11045 /* Return true when T is an address and is known to be nonzero.
11046 For floating point we further ensure that T is not denormal.
11047 Similar logic is present in nonzero_address in rtlanal.h. */
11050 tree_expr_nonzero_p (tree t
)
11052 tree type
= TREE_TYPE (t
);
11054 /* Doing something useful for floating point would need more work. */
11055 if (!INTEGRAL_TYPE_P (type
) && !POINTER_TYPE_P (type
))
11058 switch (TREE_CODE (t
))
11061 return tree_expr_nonzero_p (TREE_OPERAND (t
, 0));
11064 /* We used to test for !integer_zerop here. This does not work correctly
11065 if TREE_CONSTANT_OVERFLOW (t). */
11066 return (TREE_INT_CST_LOW (t
) != 0
11067 || TREE_INT_CST_HIGH (t
) != 0);
11070 if (!TYPE_UNSIGNED (type
) && !flag_wrapv
)
11072 /* With the presence of negative values it is hard
11073 to say something. */
11074 if (!tree_expr_nonnegative_p (TREE_OPERAND (t
, 0))
11075 || !tree_expr_nonnegative_p (TREE_OPERAND (t
, 1)))
11077 /* One of operands must be positive and the other non-negative. */
11078 return (tree_expr_nonzero_p (TREE_OPERAND (t
, 0))
11079 || tree_expr_nonzero_p (TREE_OPERAND (t
, 1)));
11084 if (!TYPE_UNSIGNED (type
) && !flag_wrapv
)
11086 return (tree_expr_nonzero_p (TREE_OPERAND (t
, 0))
11087 && tree_expr_nonzero_p (TREE_OPERAND (t
, 1)));
11093 tree inner_type
= TREE_TYPE (TREE_OPERAND (t
, 0));
11094 tree outer_type
= TREE_TYPE (t
);
11096 return (TYPE_PRECISION (inner_type
) >= TYPE_PRECISION (outer_type
)
11097 && tree_expr_nonzero_p (TREE_OPERAND (t
, 0)));
11103 tree base
= get_base_address (TREE_OPERAND (t
, 0));
11108 /* Weak declarations may link to NULL. */
11109 if (VAR_OR_FUNCTION_DECL_P (base
))
11110 return !DECL_WEAK (base
);
11112 /* Constants are never weak. */
11113 if (CONSTANT_CLASS_P (base
))
11120 return (tree_expr_nonzero_p (TREE_OPERAND (t
, 1))
11121 && tree_expr_nonzero_p (TREE_OPERAND (t
, 2)));
11124 return (tree_expr_nonzero_p (TREE_OPERAND (t
, 0))
11125 && tree_expr_nonzero_p (TREE_OPERAND (t
, 1)));
11128 if (tree_expr_nonzero_p (TREE_OPERAND (t
, 0)))
11130 /* When both operands are nonzero, then MAX must be too. */
11131 if (tree_expr_nonzero_p (TREE_OPERAND (t
, 1)))
11134 /* MAX where operand 0 is positive is positive. */
11135 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 0));
11137 /* MAX where operand 1 is positive is positive. */
11138 else if (tree_expr_nonzero_p (TREE_OPERAND (t
, 1))
11139 && tree_expr_nonnegative_p (TREE_OPERAND (t
, 1)))
11143 case COMPOUND_EXPR
:
11146 return tree_expr_nonzero_p (TREE_OPERAND (t
, 1));
11149 case NON_LVALUE_EXPR
:
11150 return tree_expr_nonzero_p (TREE_OPERAND (t
, 0));
11153 return tree_expr_nonzero_p (TREE_OPERAND (t
, 1))
11154 || tree_expr_nonzero_p (TREE_OPERAND (t
, 0));
11157 return alloca_call_p (t
);
11165 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
11166 attempt to fold the expression to a constant without modifying TYPE,
11169 If the expression could be simplified to a constant, then return
11170 the constant. If the expression would not be simplified to a
11171 constant, then return NULL_TREE. */
11174 fold_binary_to_constant (enum tree_code code
, tree type
, tree op0
, tree op1
)
11176 tree tem
= fold_binary (code
, type
, op0
, op1
);
11177 return (tem
&& TREE_CONSTANT (tem
)) ? tem
: NULL_TREE
;
11180 /* Given the components of a unary expression CODE, TYPE and OP0,
11181 attempt to fold the expression to a constant without modifying
11184 If the expression could be simplified to a constant, then return
11185 the constant. If the expression would not be simplified to a
11186 constant, then return NULL_TREE. */
11189 fold_unary_to_constant (enum tree_code code
, tree type
, tree op0
)
11191 tree tem
= fold_unary (code
, type
, op0
);
11192 return (tem
&& TREE_CONSTANT (tem
)) ? tem
: NULL_TREE
;
11195 /* If EXP represents referencing an element in a constant string
11196 (either via pointer arithmetic or array indexing), return the
11197 tree representing the value accessed, otherwise return NULL. */
11200 fold_read_from_constant_string (tree exp
)
11202 if (TREE_CODE (exp
) == INDIRECT_REF
|| TREE_CODE (exp
) == ARRAY_REF
)
11204 tree exp1
= TREE_OPERAND (exp
, 0);
11208 if (TREE_CODE (exp
) == INDIRECT_REF
)
11209 string
= string_constant (exp1
, &index
);
11212 tree low_bound
= array_ref_low_bound (exp
);
11213 index
= fold_convert (sizetype
, TREE_OPERAND (exp
, 1));
11215 /* Optimize the special-case of a zero lower bound.
11217 We convert the low_bound to sizetype to avoid some problems
11218 with constant folding. (E.g. suppose the lower bound is 1,
11219 and its mode is QI. Without the conversion,l (ARRAY
11220 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
11221 +INDEX), which becomes (ARRAY+255+INDEX). Opps!) */
11222 if (! integer_zerop (low_bound
))
11223 index
= size_diffop (index
, fold_convert (sizetype
, low_bound
));
11229 && TREE_TYPE (exp
) == TREE_TYPE (TREE_TYPE (string
))
11230 && TREE_CODE (string
) == STRING_CST
11231 && TREE_CODE (index
) == INTEGER_CST
11232 && compare_tree_int (index
, TREE_STRING_LENGTH (string
)) < 0
11233 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string
))))
11235 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string
)))) == 1))
11236 return fold_convert (TREE_TYPE (exp
),
11237 build_int_cst (NULL_TREE
,
11238 (TREE_STRING_POINTER (string
)
11239 [TREE_INT_CST_LOW (index
)])));
11244 /* Return the tree for neg (ARG0) when ARG0 is known to be either
11245 an integer constant or real constant.
11247 TYPE is the type of the result. */
11250 fold_negate_const (tree arg0
, tree type
)
11252 tree t
= NULL_TREE
;
11254 switch (TREE_CODE (arg0
))
11258 unsigned HOST_WIDE_INT low
;
11259 HOST_WIDE_INT high
;
11260 int overflow
= neg_double (TREE_INT_CST_LOW (arg0
),
11261 TREE_INT_CST_HIGH (arg0
),
11263 t
= build_int_cst_wide (type
, low
, high
);
11264 t
= force_fit_type (t
, 1,
11265 (overflow
| TREE_OVERFLOW (arg0
))
11266 && !TYPE_UNSIGNED (type
),
11267 TREE_CONSTANT_OVERFLOW (arg0
));
11272 t
= build_real (type
, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0
)));
11276 gcc_unreachable ();
11282 /* Return the tree for abs (ARG0) when ARG0 is known to be either
11283 an integer constant or real constant.
11285 TYPE is the type of the result. */
11288 fold_abs_const (tree arg0
, tree type
)
11290 tree t
= NULL_TREE
;
11292 switch (TREE_CODE (arg0
))
11295 /* If the value is unsigned, then the absolute value is
11296 the same as the ordinary value. */
11297 if (TYPE_UNSIGNED (type
))
11299 /* Similarly, if the value is non-negative. */
11300 else if (INT_CST_LT (integer_minus_one_node
, arg0
))
11302 /* If the value is negative, then the absolute value is
11306 unsigned HOST_WIDE_INT low
;
11307 HOST_WIDE_INT high
;
11308 int overflow
= neg_double (TREE_INT_CST_LOW (arg0
),
11309 TREE_INT_CST_HIGH (arg0
),
11311 t
= build_int_cst_wide (type
, low
, high
);
11312 t
= force_fit_type (t
, -1, overflow
| TREE_OVERFLOW (arg0
),
11313 TREE_CONSTANT_OVERFLOW (arg0
));
11318 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0
)))
11319 t
= build_real (type
, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0
)));
11325 gcc_unreachable ();
11331 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
11332 constant. TYPE is the type of the result. */
11335 fold_not_const (tree arg0
, tree type
)
11337 tree t
= NULL_TREE
;
11339 gcc_assert (TREE_CODE (arg0
) == INTEGER_CST
);
11341 t
= build_int_cst_wide (type
,
11342 ~ TREE_INT_CST_LOW (arg0
),
11343 ~ TREE_INT_CST_HIGH (arg0
));
11344 t
= force_fit_type (t
, 0, TREE_OVERFLOW (arg0
),
11345 TREE_CONSTANT_OVERFLOW (arg0
));
11350 /* Given CODE, a relational operator, the target type, TYPE and two
11351 constant operands OP0 and OP1, return the result of the
11352 relational operation. If the result is not a compile time
11353 constant, then return NULL_TREE. */
11356 fold_relational_const (enum tree_code code
, tree type
, tree op0
, tree op1
)
11358 int result
, invert
;
11360 /* From here on, the only cases we handle are when the result is
11361 known to be a constant. */
11363 if (TREE_CODE (op0
) == REAL_CST
&& TREE_CODE (op1
) == REAL_CST
)
11365 const REAL_VALUE_TYPE
*c0
= TREE_REAL_CST_PTR (op0
);
11366 const REAL_VALUE_TYPE
*c1
= TREE_REAL_CST_PTR (op1
);
11368 /* Handle the cases where either operand is a NaN. */
11369 if (real_isnan (c0
) || real_isnan (c1
))
11379 case UNORDERED_EXPR
:
11393 if (flag_trapping_math
)
11399 gcc_unreachable ();
11402 return constant_boolean_node (result
, type
);
11405 return constant_boolean_node (real_compare (code
, c0
, c1
), type
);
  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */
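  /* For example, "3 >= 5" is evaluated as LT with the same operands, giving
     "3 < 5" = 1, and the result is then inverted to yield 0.  */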
11417 if (code
== LE_EXPR
|| code
== GT_EXPR
)
11422 code
= swap_tree_comparison (code
);
11425 /* Note that it is safe to invert for real values here because we
11426 have already handled the one case that it matters. */
11429 if (code
== NE_EXPR
|| code
== GE_EXPR
)
11432 code
= invert_tree_comparison (code
, false);
11435 /* Compute a result for LT or EQ if args permit;
11436 Otherwise return T. */
11437 if (TREE_CODE (op0
) == INTEGER_CST
&& TREE_CODE (op1
) == INTEGER_CST
)
11439 if (code
== EQ_EXPR
)
11440 result
= tree_int_cst_equal (op0
, op1
);
11441 else if (TYPE_UNSIGNED (TREE_TYPE (op0
)))
11442 result
= INT_CST_LT_UNSIGNED (op0
, op1
);
11444 result
= INT_CST_LT (op0
, op1
);
11451 return constant_boolean_node (result
, type
);
11454 /* Build an expression for the a clean point containing EXPR with type TYPE.
11455 Don't build a cleanup point expression for EXPR which don't have side
11459 fold_build_cleanup_point_expr (tree type
, tree expr
)
11461 /* If the expression does not have side effects then we don't have to wrap
11462 it with a cleanup point expression. */
11463 if (!TREE_SIDE_EFFECTS (expr
))
11466 /* If the expression is a return, check to see if the expression inside the
11467 return has no side effects or the right hand side of the modify expression
11468 inside the return. If either don't have side effects set we don't need to
11469 wrap the expression in a cleanup point expression. Note we don't check the
11470 left hand side of the modify because it should always be a return decl. */
11471 if (TREE_CODE (expr
) == RETURN_EXPR
)
11473 tree op
= TREE_OPERAND (expr
, 0);
11474 if (!op
|| !TREE_SIDE_EFFECTS (op
))
11476 op
= TREE_OPERAND (op
, 1);
11477 if (!TREE_SIDE_EFFECTS (op
))
11481 return build1 (CLEANUP_POINT_EXPR
, type
, expr
);
11484 /* Build an expression for the address of T. Folds away INDIRECT_REF to
11485 avoid confusing the gimplify process. */
11488 build_fold_addr_expr_with_type (tree t
, tree ptrtype
)
11490 /* The size of the object is not relevant when talking about its address. */
11491 if (TREE_CODE (t
) == WITH_SIZE_EXPR
)
11492 t
= TREE_OPERAND (t
, 0);
11494 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
11495 if (TREE_CODE (t
) == INDIRECT_REF
11496 || TREE_CODE (t
) == MISALIGNED_INDIRECT_REF
)
11498 t
= TREE_OPERAND (t
, 0);
11499 if (TREE_TYPE (t
) != ptrtype
)
11500 t
= build1 (NOP_EXPR
, ptrtype
, t
);
11506 while (handled_component_p (base
))
11507 base
= TREE_OPERAND (base
, 0);
11509 TREE_ADDRESSABLE (base
) = 1;
11511 t
= build1 (ADDR_EXPR
, ptrtype
, t
);
11518 build_fold_addr_expr (tree t
)
11520 return build_fold_addr_expr_with_type (t
, build_pointer_type (TREE_TYPE (t
)));
11523 /* Given a pointer value OP0 and a type TYPE, return a simplified version
11524 of an indirection through OP0, or NULL_TREE if no simplification is
11528 fold_indirect_ref_1 (tree type
, tree op0
)
11534 subtype
= TREE_TYPE (sub
);
11535 if (!POINTER_TYPE_P (subtype
))
11538 if (TREE_CODE (sub
) == ADDR_EXPR
)
11540 tree op
= TREE_OPERAND (sub
, 0);
11541 tree optype
= TREE_TYPE (op
);
11543 if (type
== optype
)
11545 /* *(foo *)&fooarray => fooarray[0] */
11546 else if (TREE_CODE (optype
) == ARRAY_TYPE
11547 && type
== TREE_TYPE (optype
))
11549 tree type_domain
= TYPE_DOMAIN (optype
);
11550 tree min_val
= size_zero_node
;
11551 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
11552 min_val
= TYPE_MIN_VALUE (type_domain
);
11553 return build4 (ARRAY_REF
, type
, op
, min_val
, NULL_TREE
, NULL_TREE
);
11557 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
11558 if (TREE_CODE (TREE_TYPE (subtype
)) == ARRAY_TYPE
11559 && type
== TREE_TYPE (TREE_TYPE (subtype
)))
11562 tree min_val
= size_zero_node
;
11563 sub
= build_fold_indirect_ref (sub
);
11564 type_domain
= TYPE_DOMAIN (TREE_TYPE (sub
));
11565 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
11566 min_val
= TYPE_MIN_VALUE (type_domain
);
11567 return build4 (ARRAY_REF
, type
, sub
, min_val
, NULL_TREE
, NULL_TREE
);
11573 /* Builds an expression for an indirection through T, simplifying some
11577 build_fold_indirect_ref (tree t
)
11579 tree type
= TREE_TYPE (TREE_TYPE (t
));
11580 tree sub
= fold_indirect_ref_1 (type
, t
);
11585 return build1 (INDIRECT_REF
, type
, t
);
11588 /* Given an INDIRECT_REF T, return either T or a simplified version. */
11591 fold_indirect_ref (tree t
)
11593 tree sub
= fold_indirect_ref_1 (TREE_TYPE (t
), TREE_OPERAND (t
, 0));
11601 /* Strip non-trapping, non-side-effecting tree nodes from an expression
11602 whose result is ignored. The type of the returned tree need not be
11603 the same as the original expression. */
11606 fold_ignored_result (tree t
)
11608 if (!TREE_SIDE_EFFECTS (t
))
11609 return integer_zero_node
;
11612 switch (TREE_CODE_CLASS (TREE_CODE (t
)))
11615 t
= TREE_OPERAND (t
, 0);
11619 case tcc_comparison
:
11620 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1)))
11621 t
= TREE_OPERAND (t
, 0);
11622 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 0)))
11623 t
= TREE_OPERAND (t
, 1);
11628 case tcc_expression
:
11629 switch (TREE_CODE (t
))
11631 case COMPOUND_EXPR
:
11632 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1)))
11634 t
= TREE_OPERAND (t
, 0);
11638 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1))
11639 || TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 2)))
11641 t
= TREE_OPERAND (t
, 0);
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */
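/* For example, rounding VALUE = 37 up to a multiple of DIVISOR = 8 yields 40;
   since 8 is a power of two this is computed below as (37 + 7) & -8.  */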
11658 round_up (tree value
, int divisor
)
11660 tree div
= NULL_TREE
;
11662 gcc_assert (divisor
> 0);
11666 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11667 have to do anything. Only do this when we are not given a const,
11668 because in that case, this check is more expensive than just
11670 if (TREE_CODE (value
) != INTEGER_CST
)
11672 div
= build_int_cst (TREE_TYPE (value
), divisor
);
11674 if (multiple_of_p (TREE_TYPE (value
), value
, div
))
11678 /* If divisor is a power of two, simplify this to bit manipulation. */
11679 if (divisor
== (divisor
& -divisor
))
11683 t
= build_int_cst (TREE_TYPE (value
), divisor
- 1);
11684 value
= size_binop (PLUS_EXPR
, value
, t
);
11685 t
= build_int_cst (TREE_TYPE (value
), -divisor
);
11686 value
= size_binop (BIT_AND_EXPR
, value
, t
);
11691 div
= build_int_cst (TREE_TYPE (value
), divisor
);
11692 value
= size_binop (CEIL_DIV_EXPR
, value
, div
);
11693 value
= size_binop (MULT_EXPR
, value
, div
);
11699 /* Likewise, but round down. */
11702 round_down (tree value
, int divisor
)
11704 tree div
= NULL_TREE
;
11706 gcc_assert (divisor
> 0);
11710 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11711 have to do anything. Only do this when we are not given a const,
11712 because in that case, this check is more expensive than just
11714 if (TREE_CODE (value
) != INTEGER_CST
)
11716 div
= build_int_cst (TREE_TYPE (value
), divisor
);
11718 if (multiple_of_p (TREE_TYPE (value
), value
, div
))
11722 /* If divisor is a power of two, simplify this to bit manipulation. */
11723 if (divisor
== (divisor
& -divisor
))
11727 t
= build_int_cst (TREE_TYPE (value
), -divisor
);
11728 value
= size_binop (BIT_AND_EXPR
, value
, t
);
11733 div
= build_int_cst (TREE_TYPE (value
), divisor
);
11734 value
= size_binop (FLOOR_DIV_EXPR
, value
, div
);
11735 value
= size_binop (MULT_EXPR
, value
, div
);
11741 /* Returns the pointer to the base of the object addressed by EXP and
11742 extracts the information about the offset of the access, storing it
11743 to PBITPOS and POFFSET. */
11746 split_address_to_core_and_offset (tree exp
,
11747 HOST_WIDE_INT
*pbitpos
, tree
*poffset
)
11750 enum machine_mode mode
;
11751 int unsignedp
, volatilep
;
11752 HOST_WIDE_INT bitsize
;
11754 if (TREE_CODE (exp
) == ADDR_EXPR
)
11756 core
= get_inner_reference (TREE_OPERAND (exp
, 0), &bitsize
, pbitpos
,
11757 poffset
, &mode
, &unsignedp
, &volatilep
,
11759 core
= build_fold_addr_expr (core
);
11765 *poffset
= NULL_TREE
;
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */
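/* For instance, given a char array A, the addresses &A[7] and &A[3] differ by
   the constant 4, so the function would return true with *DIFF set to 4.  */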
11775 ptr_difference_const (tree e1
, tree e2
, HOST_WIDE_INT
*diff
)
11778 HOST_WIDE_INT bitpos1
, bitpos2
;
11779 tree toffset1
, toffset2
, tdiff
, type
;
11781 core1
= split_address_to_core_and_offset (e1
, &bitpos1
, &toffset1
);
11782 core2
= split_address_to_core_and_offset (e2
, &bitpos2
, &toffset2
);
11784 if (bitpos1
% BITS_PER_UNIT
!= 0
11785 || bitpos2
% BITS_PER_UNIT
!= 0
11786 || !operand_equal_p (core1
, core2
, 0))
11789 if (toffset1
&& toffset2
)
11791 type
= TREE_TYPE (toffset1
);
11792 if (type
!= TREE_TYPE (toffset2
))
11793 toffset2
= fold_convert (type
, toffset2
);
11795 tdiff
= fold_build2 (MINUS_EXPR
, type
, toffset1
, toffset2
);
11796 if (!cst_and_fits_in_hwi (tdiff
))
11799 *diff
= int_cst_value (tdiff
);
11801 else if (toffset1
|| toffset2
)
11803 /* If only one of the offsets is non-constant, the difference cannot
11810 *diff
+= (bitpos1
- bitpos2
) / BITS_PER_UNIT
;
11814 /* Simplify the floating point expression EXP when the sign of the
11815 result is not significant. Return NULL_TREE if no simplification
11819 fold_strip_sign_ops (tree exp
)
11823 switch (TREE_CODE (exp
))
11827 arg0
= fold_strip_sign_ops (TREE_OPERAND (exp
, 0));
11828 return arg0
? arg0
: TREE_OPERAND (exp
, 0);
11832 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp
))))
11834 arg0
= fold_strip_sign_ops (TREE_OPERAND (exp
, 0));
11835 arg1
= fold_strip_sign_ops (TREE_OPERAND (exp
, 1));
11836 if (arg0
!= NULL_TREE
|| arg1
!= NULL_TREE
)
11837 return fold_build2 (TREE_CODE (exp
), TREE_TYPE (exp
),
11838 arg0
? arg0
: TREE_OPERAND (exp
, 0),
11839 arg1
? arg1
: TREE_OPERAND (exp
, 1));