fold-const.c (negate_expr_p): MULT_EXPRs and RDIV_EXPRs are easy to negate if either...
[gcc.git] / gcc / fold-const.c
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
29
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type.
32
33 fold takes a tree as argument and returns a simplified tree.
34
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
38
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
41
42 force_fit_type takes a constant and prior overflow indicator, and
43 forces the value to fit the type. It returns an overflow indicator. */
44
45 #include "config.h"
46 #include "system.h"
47 #include "coretypes.h"
48 #include "tm.h"
49 #include "flags.h"
50 #include "tree.h"
51 #include "real.h"
52 #include "rtl.h"
53 #include "expr.h"
54 #include "tm_p.h"
55 #include "toplev.h"
56 #include "ggc.h"
57 #include "hashtab.h"
58 #include "langhooks.h"
59 #include "md5.h"
60
61 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
62 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
63 static bool negate_expr_p (tree);
64 static tree negate_expr (tree);
65 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
66 static tree associate_trees (tree, tree, enum tree_code, tree);
67 static tree int_const_binop (enum tree_code, tree, tree, int);
68 static tree const_binop (enum tree_code, tree, tree, int);
69 static hashval_t size_htab_hash (const void *);
70 static int size_htab_eq (const void *, const void *);
71 static tree fold_convert (tree, tree);
72 static enum tree_code invert_tree_comparison (enum tree_code);
73 static enum tree_code swap_tree_comparison (enum tree_code);
74 static int comparison_to_compcode (enum tree_code);
75 static enum tree_code compcode_to_comparison (int);
76 static int truth_value_p (enum tree_code);
77 static int operand_equal_for_comparison_p (tree, tree, tree);
78 static int twoval_comparison_p (tree, tree *, tree *, int *);
79 static tree eval_subst (tree, tree, tree, tree, tree);
80 static tree pedantic_omit_one_operand (tree, tree, tree);
81 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
82 static tree make_bit_field_ref (tree, tree, int, int, int);
83 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
84 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
85 enum machine_mode *, int *, int *,
86 tree *, tree *);
87 static int all_ones_mask_p (tree, int);
88 static tree sign_bit_p (tree, tree);
89 static int simple_operand_p (tree);
90 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
91 static tree make_range (tree, int *, tree *, tree *);
92 static tree build_range_check (tree, tree, int, tree, tree);
93 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
94 tree);
95 static tree fold_range_test (tree);
96 static tree unextend (tree, int, int, tree);
97 static tree fold_truthop (enum tree_code, tree, tree, tree);
98 static tree optimize_minmax_comparison (tree);
99 static tree extract_muldiv (tree, tree, enum tree_code, tree);
100 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
101 static tree strip_compound_expr (tree, tree);
102 static int multiple_of_p (tree, tree, tree);
103 static tree constant_boolean_node (int, tree);
104 static int count_cond (tree, int);
105 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
106 tree, int);
107 static bool fold_real_zero_addition_p (tree, tree, int);
108 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
109 tree, tree, tree);
110 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
111
112 /* The following constants represent a bit based encoding of GCC's
113 comparison operators. This encoding simplifies transformations
114 on relational comparison operators, such as AND and OR. */
115 #define COMPCODE_FALSE 0
116 #define COMPCODE_LT 1
117 #define COMPCODE_EQ 2
118 #define COMPCODE_LE 3
119 #define COMPCODE_GT 4
120 #define COMPCODE_NE 5
121 #define COMPCODE_GE 6
122 #define COMPCODE_TRUE 7
123
124 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
125 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
126 and SUM1. Then this yields nonzero if overflow occurred during the
127 addition.
128
129 Overflow occurs if A and B have the same sign, but A and SUM differ in
130 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
131 sign. */
132 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
133 \f
134 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
135 We do that by representing the two-word integer in 4 words, with only
136 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
137 number. The value of the word is LOWPART + HIGHPART * BASE. */
138
139 #define LOWPART(x) \
140 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
141 #define HIGHPART(x) \
142 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
143 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
144
145 /* Unpack a two-word integer into 4 words.
146 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
147 WORDS points to the array of HOST_WIDE_INTs. */
148
149 static void
150 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
151 {
152 words[0] = LOWPART (low);
153 words[1] = HIGHPART (low);
154 words[2] = LOWPART (hi);
155 words[3] = HIGHPART (hi);
156 }
157
158 /* Pack an array of 4 words into a two-word integer.
159 WORDS points to the array of words.
160 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
161
162 static void
163 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
164 HOST_WIDE_INT *hi)
165 {
166 *low = words[0] + words[1] * BASE;
167 *hi = words[2] + words[3] * BASE;
168 }
169 \f
170 /* Make the integer constant T valid for its type by setting to 0 or 1 all
171 the bits in the constant that don't belong in the type.
172
173 Return 1 if a signed overflow occurs, 0 otherwise. If OVERFLOW is
174 nonzero, a signed overflow has already occurred in calculating T, so
175 propagate it. */
176
int
force_fit_type (tree t, int overflow)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;

  if (TREE_CODE (t) == REAL_CST)
    {
      /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
	 Consider doing it via real_convert now.  */
      return overflow;
    }

  /* Only INTEGER_CSTs are modified below; anything else passes the
     prior overflow indicator through untouched.  */
  else if (TREE_CODE (t) != INTEGER_CST)
    return overflow;

  /* Remember the original value so we can detect whether truncation
     or sign extension changed it (i.e. a signed overflow).  */
  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec == 2 * HOST_BITS_PER_WIDE_INT)
    ;	/* Full double-word precision: nothing to mask off.  */
  else if (prec > HOST_BITS_PER_WIDE_INT)
    TREE_INT_CST_HIGH (t)
      &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      TREE_INT_CST_HIGH (t) = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
	TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
    }

  /* Unsigned types do not suffer sign extension or overflow unless they
     are a sizetype.  */
  if (TREE_UNSIGNED (TREE_TYPE (t))
      && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
	    && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
    return overflow;

  /* If the value's sign bit is set, extend the sign.  */
  if (prec != 2 * HOST_BITS_PER_WIDE_INT
      && (prec > HOST_BITS_PER_WIDE_INT
	  ? 0 != (TREE_INT_CST_HIGH (t)
		  & ((HOST_WIDE_INT) 1
		     << (prec - HOST_BITS_PER_WIDE_INT - 1)))
	  : 0 != (TREE_INT_CST_LOW (t)
		  & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
    {
      /* Value is negative:
	 set to 1 all the bits that are outside this type's precision.  */
      if (prec > HOST_BITS_PER_WIDE_INT)
	TREE_INT_CST_HIGH (t)
	  |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      else
	{
	  TREE_INT_CST_HIGH (t) = -1;
	  if (prec < HOST_BITS_PER_WIDE_INT)
	    TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
	}
    }

  /* Return nonzero if signed overflow occurred: either it was flagged
     on entry, or the stored value differs from the original.  */
  return
    ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
     != 0);
}
251 \f
252 /* Add two doubleword integers with doubleword result.
253 Each argument is given as two `HOST_WIDE_INT' pieces.
254 One argument is L1 and H1; the other, L2 and H2.
255 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
256
257 int
258 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
259 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
260 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
261 {
262 unsigned HOST_WIDE_INT l;
263 HOST_WIDE_INT h;
264
265 l = l1 + l2;
266 h = h1 + h2 + (l < l1);
267
268 *lv = l;
269 *hv = h;
270 return OVERFLOW_SUM_SIGN (h1, h2, h);
271 }
272
273 /* Negate a doubleword integer with doubleword result.
274 Return nonzero if the operation overflows, assuming it's signed.
275 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
276 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
277
278 int
279 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
280 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
281 {
282 if (l1 == 0)
283 {
284 *lv = 0;
285 *hv = - h1;
286 return (*hv & h1) < 0;
287 }
288 else
289 {
290 *lv = -l1;
291 *hv = ~h1;
292 return 0;
293 }
294 }
295 \f
296 /* Multiply two doubleword integers with doubleword result.
297 Return nonzero if the operation overflows, assuming it's signed.
298 Each argument is given as two `HOST_WIDE_INT' pieces.
299 One argument is L1 and H1; the other, L2 and H2.
300 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
301
int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  /* Split each operand into four half-words and do schoolbook
     multiplication, producing an 8-half-word product.  */
  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
	{
	  k = i + j;
	  /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
	  carry += arg1[i] * arg2[j];
	  /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
	  carry += prod[k];
	  prod[k] = LOWPART (carry);
	  carry = HIGHPART (carry);
	}
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  The top half above was
     computed as if the operands were unsigned, so correct it for each
     negative operand by subtracting the other operand from it.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  /* For a negative result the top half must be all ones, for a
     non-negative result all zeros; anything else is signed overflow.  */
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
353 \f
354 /* Shift the doubleword integer in L1, H1 left by COUNT places
355 keeping only PREC bits of result.
356 Shift right if COUNT is negative.
357 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
358 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
359
void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  /* A negative count means shift right instead.  */
  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      /* The double shift of L1 avoids the undefined shift by exactly
	 HOST_BITS_PER_WIDE_INT when COUNT == 0.  */
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
	     | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  /* SIGNMASK is all ones if the bit at position PREC - 1 of the result
     is set, all zeros otherwise.  */
  signmask = -((prec > HOST_BITS_PER_WIDE_INT
		? ((unsigned HOST_WIDE_INT) *hv
		   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
		: (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;	/* Result already occupies the full double word.  */
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
418
419 /* Shift the doubleword integer in L1, H1 right by COUNT places
420 keeping only PREC bits of result. COUNT must be positive.
421 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
422 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
423
void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
	       int arith)
{
  unsigned HOST_WIDE_INT signmask;

  /* For an arithmetic shift, SIGNMASK is all ones when the input is
     negative (fills vacated bits); for a logical shift it is zero.  */
  signmask = (arith
	      ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
	      : 0);

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      /* The double shift of H1 avoids the undefined shift by exactly
	 HOST_BITS_PER_WIDE_INT when COUNT == 0.  */
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
	     | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      /* Everything was shifted out; the result is all sign bits.  */
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;	/* Remaining bits fill the full double word; nothing to extend.  */
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
481 \f
482 /* Rotate the doubleword integer in L1, H1 left by COUNT places
483 keeping only PREC bits of result.
484 Rotate right if COUNT is negative.
485 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
486
487 void
488 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
489 HOST_WIDE_INT count, unsigned int prec,
490 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
491 {
492 unsigned HOST_WIDE_INT s1l, s2l;
493 HOST_WIDE_INT s1h, s2h;
494
495 count %= prec;
496 if (count < 0)
497 count += prec;
498
499 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
500 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
501 *lv = s1l | s2l;
502 *hv = s1h | s2h;
503 }
504
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
508
509 void
510 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
511 HOST_WIDE_INT count, unsigned int prec,
512 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
513 {
514 unsigned HOST_WIDE_INT s1l, s2l;
515 HOST_WIDE_INT s1h, s2h;
516
517 count %= prec;
518 if (count < 0)
519 count += prec;
520
521 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
522 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
523 *lv = s1l | s2l;
524 *hv = s1h | s2h;
525 }
526 \f
527 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
528 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
529 CODE is a tree code for a kind of division, one of
530 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
531 or EXACT_DIV_EXPR
532 It controls how the quotient is rounded to an integer.
533 Return nonzero if the operation overflows.
534 UNS nonzero says do unsigned division. */
535
int
div_and_round_double (enum tree_code code, int uns,
		      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
		      HOST_WIDE_INT hnum_orig,
		      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
		      HOST_WIDE_INT hden_orig,
		      unsigned HOST_WIDE_INT *lquo,
		      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
		      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  /* Division by zero: flag overflow and divide by 1 instead so the
     code below still produces some defined result.  */
  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
	{
	  quo_neg = ~ quo_neg;
	  /* (minimum integer) / (-1) is the only overflow case.  */
	  if (neg_double (lnum, hnum, &lnum, &hnum)
	      && ((HOST_WIDE_INT) lden & hden) == -1)
	    overflow = 1;
	}
      if (hden < 0)
	{
	  quo_neg = ~ quo_neg;
	  neg_double (lden, hden, &lden, &hden);
	}
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  Simple short division, one
	 half-word at a time from the most significant end.  */
      for (i = 4 - 1; i >= 0; i--)
	{
	  work = num[i] + carry * BASE;
	  quo[i] = work / lden;
	  carry = work % lden;
	}
    }
  else
    {
      /* Full double precision division,
	 with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
	if (den[i] != 0)
	  {
	    den_hi_sig = i;
	    break;
	  }

      /* Ensure that the first digit of the divisor is at least BASE/2.
	 This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
	{		/* scale divisor and dividend */
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (num[i] * scale) + carry;
	      num[i] = LOWPART (work);
	      carry = HIGHPART (work);
	    }

	  num[4] = carry;
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (den[i] * scale) + carry;
	      den[i] = LOWPART (work);
	      carry = HIGHPART (work);
	      if (den[i] != 0) den_hi_sig = i;
	    }
	}

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
	{
	  /* Guess the next quotient digit, quo_est, by dividing the first
	     two remaining dividend digits by the high order quotient digit.
	     quo_est is never low and is at most 2 high.  */
	  unsigned HOST_WIDE_INT tmp;

	  num_hi_sig = i + den_hi_sig + 1;
	  work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
	  if (num[num_hi_sig] != den[den_hi_sig])
	    quo_est = work / den[den_hi_sig];
	  else
	    quo_est = BASE - 1;

	  /* Refine quo_est so it's usually correct, and at most one high.  */
	  tmp = work - quo_est * den[den_hi_sig];
	  if (tmp < BASE
	      && (den[den_hi_sig - 1] * quo_est
		  > (tmp * BASE + num[num_hi_sig - 2])))
	    quo_est--;

	  /* Try QUO_EST as the quotient digit, by multiplying the
	     divisor by QUO_EST and subtracting from the remaining dividend.
	     Keep in mind that QUO_EST is the I - 1st digit.  */

	  carry = 0;
	  for (j = 0; j <= den_hi_sig; j++)
	    {
	      work = quo_est * den[j] + carry;
	      carry = HIGHPART (work);
	      work = num[i + j] - LOWPART (work);
	      num[i + j] = LOWPART (work);
	      carry += HIGHPART (work) != 0;
	    }

	  /* If quo_est was high by one, then num[i] went negative and
	     we need to correct things.  */
	  if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
	    {
	      quo_est--;
	      carry = 0;		/* add divisor back in */
	      for (j = 0; j <= den_hi_sig; j++)
		{
		  work = num[i + j] + den[j] + carry;
		  carry = HIGHPART (work);
		  num[i + j] = LOWPART (work);
		}

	      num [num_hi_sig] += carry;
	    }

	  /* Store the quotient digit.  */
	  quo[i] = quo_est;
	}
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  /* Adjust the truncating quotient computed above according to the
     requested rounding mode.  */
  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio < 0 && rem != 0 */
	{
	  /* quo = quo - 1;  */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
	{
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
	unsigned HOST_WIDE_INT labs_rem = *lrem;
	HOST_WIDE_INT habs_rem = *hrem;
	unsigned HOST_WIDE_INT labs_den = lden, ltwice;
	HOST_WIDE_INT habs_den = hden, htwice;

	/* Get absolute values.  */
	if (*hrem < 0)
	  neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
	if (hden < 0)
	  neg_double (lden, hden, &labs_den, &habs_den);

	/* If (2 * abs (lrem) >= abs (lden)) */
	mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
		    labs_rem, habs_rem, &ltwice, &htwice);

	if (((unsigned HOST_WIDE_INT) habs_den
	     < (unsigned HOST_WIDE_INT) htwice)
	    || (((unsigned HOST_WIDE_INT) habs_den
		 == (unsigned HOST_WIDE_INT) htwice)
		&& (labs_den < ltwice)))
	  {
	    if (*hquo < 0)
	      /* quo = quo - 1;  */
	      add_double (*lquo, *hquo,
			  (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
	    else
	      /* quo = quo + 1;  */
	      add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
			  lquo, hquo);
	  }
	else
	  return overflow;
      }
      break;

    default:
      abort ();
    }

  /* compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
804 \f
805 /* Determine whether an expression T can be cheaply negated using
806 the function negate_expr. */
807
static bool
negate_expr_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TREE_UNSIGNED (type))
	return false;

      /* Check that -CST will not overflow type, i.e. that CST is not
	 the most negative value of its precision.  */
      prec = TYPE_PRECISION (type);
      if (prec > HOST_BITS_PER_WIDE_INT)
	{
	  /* The minimum value has a zero low word; any other low word
	     means the constant is safely negatable.  */
	  if (TREE_INT_CST_LOW (t) != 0)
	    return true;
	  prec -= HOST_BITS_PER_WIDE_INT;
	  val = TREE_INT_CST_HIGH (t);
	}
      else
	val = TREE_INT_CST_LOW (t);
      if (prec < HOST_BITS_PER_WIDE_INT)
	val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
      /* Negatable unless VAL is exactly the sign bit pattern.  */
      return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return ! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations;

    case MULT_EXPR:
      if (TREE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      /* A product or quotient is cheap to negate if either factor is,
	 since the negation can be pushed onto that operand.  */
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    default:
      break;
    }
  return false;
}
867
868 /* Given T, an expression, return the negation of T. Allow for T to be
869 null, in which case return null. */
870
static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      /* Fold the negation; only use it if it did not overflow.  */
      if (! TREE_UNSIGNED (type)
	  && 0 != (tem = fold (build1 (NEGATE_EXPR, type, t)))
	  && ! TREE_OVERFLOW (tem))
	return tem;
      break;

    case REAL_CST:
      tem = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (t)));
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem))
	return convert (type, tem);
      break;

    case NEGATE_EXPR:
      /* - -A -> A.  */
      return convert (type, TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	return convert (type,
			fold (build (MINUS_EXPR, TREE_TYPE (t),
				     TREE_OPERAND (t, 1),
				     TREE_OPERAND (t, 0))));
      break;

    case MULT_EXPR:
      if (TREE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      /* Push the negation onto whichever operand of the product or
	 quotient is cheaply negatable, preferring the second.  */
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return convert (type,
			    fold (build (TREE_CODE (t), TREE_TYPE (t),
					 TREE_OPERAND (t, 0),
					 negate_expr (tem))));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return convert (type,
			    fold (build (TREE_CODE (t), TREE_TYPE (t),
					 negate_expr (tem),
					 TREE_OPERAND (t, 1))));
	}
      break;

    default:
      break;
    }

  /* Fallback: wrap T in an explicit NEGATE_EXPR.  */
  return convert (type, fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t)));
}
941 \f
942 /* Split a tree IN into a constant, literal and variable parts that could be
943 combined with CODE to make IN. "constant" means an expression with
944 TREE_CONSTANT but that isn't an actual constant. CODE must be a
945 commutative arithmetic operation. Store the constant part into *CONP,
946 the literal in *LITP and return the variable part. If a part isn't
947 present, set it to null. If the tree does not decompose in this way,
948 return the entire tree as the variable part and the other parts as null.
949
950 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
951 case, we negate an operand that was subtracted. Except if it is a
952 literal for which we use *MINUS_LITP instead.
953
954 If NEGATE_P is true, we are negating all of IN, again except a literal
955 for which we use *MINUS_LITP instead.
956
957 If IN is itself a literal or constant, return it as appropriate.
958
959 Note that we do not guarantee that any of the three values will be the
960 same type as IN, but they will have the same signedness and mode. */
961
static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || (! FLOAT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      /* For a MINUS_EXPR the second operand is effectively negated.  */
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  A negated literal is tracked in
	 *MINUS_LITP rather than wrapped in a NEGATE_EXPR.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      /* Negating all of IN: swap the roles of *LITP and *MINUS_LITP
	 and negate the other two parts.  */
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
1036
1037 /* Re-associate trees split by the above function. T1 and T2 are either
1038 expressions to associate or null. Return the new expression, if any. If
1039 we build an operation, do it in TYPE and with CODE. */
1040
1041 static tree
1042 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1043 {
1044 if (t1 == 0)
1045 return t2;
1046 else if (t2 == 0)
1047 return t1;
1048
1049 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1050 try to fold this since we will have infinite recursion. But do
1051 deal with any NEGATE_EXPRs. */
1052 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1053 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1054 {
1055 if (code == PLUS_EXPR)
1056 {
1057 if (TREE_CODE (t1) == NEGATE_EXPR)
1058 return build (MINUS_EXPR, type, convert (type, t2),
1059 convert (type, TREE_OPERAND (t1, 0)));
1060 else if (TREE_CODE (t2) == NEGATE_EXPR)
1061 return build (MINUS_EXPR, type, convert (type, t1),
1062 convert (type, TREE_OPERAND (t2, 0)));
1063 }
1064 return build (code, type, convert (type, t1), convert (type, t2));
1065 }
1066
1067 return fold (build (code, type, convert (type, t1), convert (type, t2)));
1068 }
1069 \f
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  The result takes its type from ARG1.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TREE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;
  int no_overflow = 0;

  /* Each constant is represented as a HIGH/LOW pair of HOST_WIDE_INTs.  */
  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case BIT_ANDTC_EXPR:
      low = int1l & ~int2l, hi = int1h & ~int2h;
      break;

    case RSHIFT_EXPR:
      /* A right shift is a left shift by a negated count.  */
      int2l = -int2l;
      /* ... fall through ...  */
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		     &low, &hi, !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      /* A right rotate is a left rotate by a negated count.  */
      int2l = - int2l;
      /* ... fall through ...  */
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      /* Subtraction is addition of the negated second operand.  */
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case: both operands
	 nonnegative, single-word, and overflow-free.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    int1l += int2l - 1;

	  low = int1l / int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
	{
	  /* Division by one.  */
	  low = int1l, hi = int1h;
	  break;
	}
      if (int1l == int2l && int1h == int2h
	  && ! (int1l == 0 && int1h == 0))
	{
	  /* Division of a nonzero value by itself.  */
	  low = 1, hi = 0;
	  break;
	}
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
				       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case (see division above).  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    int1l += int2l - 1;
	  low = int1l % int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      /* For the modulus we want the remainder outputs of
	 div_and_round_double; the quotient goes to the garbage slots.  */
      overflow = div_and_round_double (code, uns,
				       int1l, int1h, int2l, int2h,
				       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      /* First compute LOW = (ARG1 < ARG2), comparing in the signedness
	 of TYPE, then pick the appropriate operand.  */
      if (uns)
	low = (((unsigned HOST_WIDE_INT) int1h
		< (unsigned HOST_WIDE_INT) int2h)
	       || (((unsigned HOST_WIDE_INT) int1h
		    == (unsigned HOST_WIDE_INT) int2h)
		   && int1l < int2l));
      else
	low = (int1h < int2h
	       || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
	low = int1l, hi = int1h;
      else
	low = int2l, hi = int2h;
      break;

    default:
      abort ();
    }

  /* If this is for a sizetype, can be represented as one (signed)
     HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
     constants.  */
  if (is_sizetype
      && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
	  || (hi == -1 && (HOST_WIDE_INT) low < 0))
      && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
    return size_int_type_wide (low, type);
  else
    {
      t = build_int_2 (low, hi);
      TREE_TYPE (t) = TREE_TYPE (arg1);
    }

  /* Propagate overflow from the operands and from the operation itself
     (after truncating the result to the type, unless NOTRUNC).  */
  TREE_OVERFLOW (t)
    = ((notrunc
	? (!uns || is_sizetype) && overflow
	: (force_fit_type (t, (!uns || is_sizetype) && overflow)
	   && ! no_overflow))
       | TREE_OVERFLOW (arg1)
       | TREE_OVERFLOW (arg2));

  /* If we're doing a size calculation, unsigned arithmetic does overflow.
     So check if force_fit_type truncated the value.  */
  if (is_sizetype
      && ! TREE_OVERFLOW (t)
      && (TREE_INT_CST_HIGH (t) != hi
	  || TREE_INT_CST_LOW (t) != low))
    TREE_OVERFLOW (t) = 1;

  TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
				| TREE_CONSTANT_OVERFLOW (arg1)
				| TREE_CONSTANT_OVERFLOW (arg2));
  return t;
}
1260
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Returns zero
   when the operation cannot or should not be folded.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      REAL_ARITHMETIC (value, code, d1, d2);

      /* Truncate the result to the precision of MODE before building
	 the constant node.  */
      t = build_real (type, real_value_truncate (mode, value));

      TREE_OVERFLOW (t)
	= (force_fit_type (t, 0)
	   | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
      TREE_CONSTANT_OVERFLOW (t)
	= TREE_OVERFLOW (t)
	  | TREE_CONSTANT_OVERFLOW (arg1)
	  | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
	{
	case PLUS_EXPR:
	  t = build_complex (type,
			     const_binop (PLUS_EXPR, r1, r2, notrunc),
			     const_binop (PLUS_EXPR, i1, i2, notrunc));
	  break;

	case MINUS_EXPR:
	  t = build_complex (type,
			     const_binop (MINUS_EXPR, r1, r2, notrunc),
			     const_binop (MINUS_EXPR, i1, i2, notrunc));
	  break;

	case MULT_EXPR:
	  /* (r1 + i1*i) * (r2 + i2*i)
	     = (r1*r2 - i1*i2) + (r1*i2 + i1*r2)*i.  */
	  t = build_complex (type,
			     const_binop (MINUS_EXPR,
					  const_binop (MULT_EXPR,
						       r1, r2, notrunc),
					  const_binop (MULT_EXPR,
						       i1, i2, notrunc),
					  notrunc),
			     const_binop (PLUS_EXPR,
					  const_binop (MULT_EXPR,
						       r1, i2, notrunc),
					  const_binop (MULT_EXPR,
						       i1, r2, notrunc),
					  notrunc));
	  break;

	case RDIV_EXPR:
	  {
	    /* Standard complex division: multiply numerator and
	       denominator by the conjugate of the divisor, then divide
	       both parts by |r2 + i2*i|^2.  Integral complex types use
	       TRUNC_DIV_EXPR for the component divisions.  */
	    tree magsquared
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r2, r2, notrunc),
			     const_binop (MULT_EXPR, i2, i2, notrunc),
			     notrunc);

	    t = build_complex (type,
			       const_binop
			       (INTEGRAL_TYPE_P (TREE_TYPE (r1))
				? TRUNC_DIV_EXPR : RDIV_EXPR,
				const_binop (PLUS_EXPR,
					     const_binop (MULT_EXPR, r1, r2,
							  notrunc),
					     const_binop (MULT_EXPR, i1, i2,
							  notrunc),
					     notrunc),
				magsquared, notrunc),
			       const_binop
			       (INTEGRAL_TYPE_P (TREE_TYPE (r1))
				? TRUNC_DIV_EXPR : RDIV_EXPR,
				const_binop (MINUS_EXPR,
					     const_binop (MULT_EXPR, i1, r2,
							  notrunc),
					     const_binop (MULT_EXPR, r1, i2,
							  notrunc),
					     notrunc),
				magsquared, notrunc));
	  }
	  break;

	default:
	  abort ();
	}
      return t;
    }
  return 0;
}
1401
1402 /* These are the hash table functions for the hash table of INTEGER_CST
1403 nodes of a sizetype. */
1404
1405 /* Return the hash code code X, an INTEGER_CST. */
1406
1407 static hashval_t
1408 size_htab_hash (const void *x)
1409 {
1410 tree t = (tree) x;
1411
1412 return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
1413 ^ htab_hash_pointer (TREE_TYPE (t))
1414 ^ (TREE_OVERFLOW (t) << 20));
1415 }
1416
1417 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1418 is the same as that given by *Y, which is the same. */
1419
1420 static int
1421 size_htab_eq (const void *x, const void *y)
1422 {
1423 tree xt = (tree) x;
1424 tree yt = (tree) y;
1425
1426 return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
1427 && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
1428 && TREE_TYPE (xt) == TREE_TYPE (yt)
1429 && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
1430 }
1431 \f
/* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
   bits are given by NUMBER and of the sizetype represented by KIND.  */

tree
size_int_wide (HOST_WIDE_INT number, enum size_type_kind kind)
{
  /* Map KIND to the corresponding cached sizetype node and delegate.  */
  return size_int_type_wide (number, sizetype_tab[(int) kind]);
}
1440
/* Likewise, but the desired type is specified explicitly.  */

/* Scratch INTEGER_CST reused for hash-table lookups, and the table of
   cached sizetype constants.  Both are registered with the GC.  */
static GTY (()) tree new_const;
static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
     htab_t size_htab;

tree
size_int_type_wide (HOST_WIDE_INT number, tree type)
{
  void **slot;

  /* Create the hash table and the scratch node lazily on first use.  */
  if (size_htab == 0)
    {
      size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
      new_const = make_node (INTEGER_CST);
    }

  /* Adjust NEW_CONST to be the constant we want.  If it's already in the
     hash table, we return the value from the hash table.  Otherwise, we
     place that in the hash table and make a new node for the next time.  */
  TREE_INT_CST_LOW (new_const) = number;
  /* Sign-extend NUMBER into the high word.  */
  TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
  TREE_TYPE (new_const) = type;
  TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
    = force_fit_type (new_const, 0);

  slot = htab_find_slot (size_htab, new_const, INSERT);
  if (*slot == 0)
    {
      tree t = new_const;

      /* The scratch node becomes the cached constant; allocate a fresh
	 scratch node for the next call.  */
      *slot = new_const;
      new_const = make_node (INTEGER_CST);
      return t;
    }
  else
    return (tree) *slot;
}
1479
1480 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1481 is a tree code. The type of the result is taken from the operands.
1482 Both must be the same type integer type and it must be a size type.
1483 If the operands are constant, so is the result. */
1484
1485 tree
1486 size_binop (enum tree_code code, tree arg0, tree arg1)
1487 {
1488 tree type = TREE_TYPE (arg0);
1489
1490 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1491 || type != TREE_TYPE (arg1))
1492 abort ();
1493
1494 /* Handle the special case of two integer constants faster. */
1495 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1496 {
1497 /* And some specific cases even faster than that. */
1498 if (code == PLUS_EXPR && integer_zerop (arg0))
1499 return arg1;
1500 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1501 && integer_zerop (arg1))
1502 return arg0;
1503 else if (code == MULT_EXPR && integer_onep (arg0))
1504 return arg1;
1505
1506 /* Handle general case of two integer constants. */
1507 return int_const_binop (code, arg0, arg1, 0);
1508 }
1509
1510 if (arg0 == error_mark_node || arg1 == error_mark_node)
1511 return error_mark_node;
1512
1513 return fold (build (code, type, arg0, arg1));
1514 }
1515
1516 /* Given two values, either both of sizetype or both of bitsizetype,
1517 compute the difference between the two values. Return the value
1518 in signed type corresponding to the type of the operands. */
1519
1520 tree
1521 size_diffop (tree arg0, tree arg1)
1522 {
1523 tree type = TREE_TYPE (arg0);
1524 tree ctype;
1525
1526 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1527 || type != TREE_TYPE (arg1))
1528 abort ();
1529
1530 /* If the type is already signed, just do the simple thing. */
1531 if (! TREE_UNSIGNED (type))
1532 return size_binop (MINUS_EXPR, arg0, arg1);
1533
1534 ctype = (type == bitsizetype || type == ubitsizetype
1535 ? sbitsizetype : ssizetype);
1536
1537 /* If either operand is not a constant, do the conversions to the signed
1538 type and subtract. The hardware will do the right thing with any
1539 overflow in the subtraction. */
1540 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1541 return size_binop (MINUS_EXPR, convert (ctype, arg0),
1542 convert (ctype, arg1));
1543
1544 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1545 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1546 overflow) and negate (which can't either). Special-case a result
1547 of zero while we're here. */
1548 if (tree_int_cst_equal (arg0, arg1))
1549 return convert (ctype, integer_zero_node);
1550 else if (tree_int_cst_lt (arg1, arg0))
1551 return convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1552 else
1553 return size_binop (MINUS_EXPR, convert (ctype, integer_zero_node),
1554 convert (ctype, size_binop (MINUS_EXPR, arg1, arg0)));
1555 }
1556 \f
1557
/* Given T, a tree representing type conversion of ARG1, a constant,
   return a constant tree representing the result of conversion.  When
   the conversion cannot be folded, T is returned unchanged.  */

static tree
fold_convert (tree t, tree arg1)
{
  tree type = TREE_TYPE (t);
  int overflow = 0;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  /* If we would build a constant wider than GCC supports,
	     leave the conversion unfolded.  */
	  if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
	    return t;

	  /* If we are trying to make a sizetype for a small integer, use
	     size_int to pick up cached types to reduce duplicate nodes.  */
	  if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
	      && !TREE_CONSTANT_OVERFLOW (arg1)
	      && compare_tree_int (arg1, 10000) < 0)
	    return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);

	  /* Given an integer constant, make new constant with new type,
	     appropriately sign-extended or truncated.  */
	  t = build_int_2 (TREE_INT_CST_LOW (arg1),
			   TREE_INT_CST_HIGH (arg1));
	  TREE_TYPE (t) = type;
	  /* Indicate an overflow if (1) ARG1 already overflowed,
	     or (2) force_fit_type indicates an overflow.
	     Tell force_fit_type that an overflow has already occurred
	     if ARG1 is a too-large unsigned value and T is signed.
	     But don't indicate an overflow if converting a pointer.  */
	  TREE_OVERFLOW (t)
	    = ((force_fit_type (t,
				(TREE_INT_CST_HIGH (arg1) < 0
				 && (TREE_UNSIGNED (type)
				     < TREE_UNSIGNED (TREE_TYPE (arg1)))))
		&& ! POINTER_TYPE_P (TREE_TYPE (arg1)))
	       || TREE_OVERFLOW (arg1));
	  TREE_CONSTANT_OVERFLOW (t)
	    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
	}
      else if (TREE_CODE (arg1) == REAL_CST)
	{
	  /* Real-to-integer conversion: truncate toward zero, with
	     range checking against TYPE's bounds.  */
	  /* Don't initialize these, use assignments.
	     Initialized local aggregates don't work on old compilers.  */
	  REAL_VALUE_TYPE x;
	  REAL_VALUE_TYPE l;
	  REAL_VALUE_TYPE u;
	  tree type1 = TREE_TYPE (arg1);
	  int no_upper_bound;

	  x = TREE_REAL_CST (arg1);
	  l = real_value_from_int_cst (type1, TYPE_MIN_VALUE (type));

	  no_upper_bound = (TYPE_MAX_VALUE (type) == NULL);
	  if (!no_upper_bound)
	    u = real_value_from_int_cst (type1, TYPE_MAX_VALUE (type));

	  /* See if X will be in range after truncation towards 0.
	     To compensate for truncation, move the bounds away from 0,
	     but reject if X exactly equals the adjusted bounds.  */
	  REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
	  if (!no_upper_bound)
	    REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
	  /* If X is a NaN, use zero instead and show we have an overflow.
	     Otherwise, range check.  */
	  if (REAL_VALUE_ISNAN (x))
	    overflow = 1, x = dconst0;
	  else if (! (REAL_VALUES_LESS (l, x)
		      && !no_upper_bound
		      && REAL_VALUES_LESS (x, u)))
	    overflow = 1;

	  {
	    HOST_WIDE_INT low, high;
	    REAL_VALUE_TO_INT (&low, &high, x);
	    t = build_int_2 (low, high);
	  }
	  TREE_TYPE (t) = type;
	  TREE_OVERFLOW (t)
	    = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
	  TREE_CONSTANT_OVERFLOW (t)
	    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
	}
      TREE_TYPE (t) = type;
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
	{
	  if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
	    {
	      /* We make a copy of ARG1 so that we don't modify an
		 existing constant tree.  */
	      t = copy_node (arg1);
	      TREE_TYPE (t) = type;
	      return t;
	    }

	  /* Real-to-real conversion: truncate the value to the
	     destination mode's precision.  */
	  t = build_real (type,
			  real_value_truncate (TYPE_MODE (type),
					       TREE_REAL_CST (arg1)));

	  TREE_OVERFLOW (t)
	    = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
	  TREE_CONSTANT_OVERFLOW (t)
	    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
	  return t;
	}
    }
  TREE_CONSTANT (t) = 1;
  return t;
}
1677 \f
1678 /* Return an expr equal to X but certainly not valid as an lvalue. */
1679
1680 tree
1681 non_lvalue (tree x)
1682 {
1683 tree result;
1684
1685 /* These things are certainly not lvalues. */
1686 if (TREE_CODE (x) == NON_LVALUE_EXPR
1687 || TREE_CODE (x) == INTEGER_CST
1688 || TREE_CODE (x) == REAL_CST
1689 || TREE_CODE (x) == STRING_CST
1690 || TREE_CODE (x) == ADDR_EXPR)
1691 return x;
1692
1693 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
1694 TREE_CONSTANT (result) = TREE_CONSTANT (x);
1695 return result;
1696 }
1697
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  Consulted by pedantic_non_lvalue.  */

int pedantic_lvalues;
1702
1703 /* When pedantic, return an expr equal to X but certainly not valid as a
1704 pedantic lvalue. Otherwise, return X. */
1705
1706 tree
1707 pedantic_non_lvalue (tree x)
1708 {
1709 if (pedantic_lvalues)
1710 return non_lvalue (x);
1711 else
1712 return x;
1713 }
1714 \f
1715 /* Given a tree comparison code, return the code that is the logical inverse
1716 of the given code. It is not safe to do this for floating-point
1717 comparisons, except for NE_EXPR and EQ_EXPR. */
1718
1719 static enum tree_code
1720 invert_tree_comparison (enum tree_code code)
1721 {
1722 switch (code)
1723 {
1724 case EQ_EXPR:
1725 return NE_EXPR;
1726 case NE_EXPR:
1727 return EQ_EXPR;
1728 case GT_EXPR:
1729 return LE_EXPR;
1730 case GE_EXPR:
1731 return LT_EXPR;
1732 case LT_EXPR:
1733 return GE_EXPR;
1734 case LE_EXPR:
1735 return GT_EXPR;
1736 default:
1737 abort ();
1738 }
1739 }
1740
1741 /* Similar, but return the comparison that results if the operands are
1742 swapped. This is safe for floating-point. */
1743
1744 static enum tree_code
1745 swap_tree_comparison (enum tree_code code)
1746 {
1747 switch (code)
1748 {
1749 case EQ_EXPR:
1750 case NE_EXPR:
1751 return code;
1752 case GT_EXPR:
1753 return LT_EXPR;
1754 case GE_EXPR:
1755 return LE_EXPR;
1756 case LT_EXPR:
1757 return GT_EXPR;
1758 case LE_EXPR:
1759 return GE_EXPR;
1760 default:
1761 abort ();
1762 }
1763 }
1764
1765
1766 /* Convert a comparison tree code from an enum tree_code representation
1767 into a compcode bit-based encoding. This function is the inverse of
1768 compcode_to_comparison. */
1769
1770 static int
1771 comparison_to_compcode (enum tree_code code)
1772 {
1773 switch (code)
1774 {
1775 case LT_EXPR:
1776 return COMPCODE_LT;
1777 case EQ_EXPR:
1778 return COMPCODE_EQ;
1779 case LE_EXPR:
1780 return COMPCODE_LE;
1781 case GT_EXPR:
1782 return COMPCODE_GT;
1783 case NE_EXPR:
1784 return COMPCODE_NE;
1785 case GE_EXPR:
1786 return COMPCODE_GE;
1787 default:
1788 abort ();
1789 }
1790 }
1791
1792 /* Convert a compcode bit-based encoding of a comparison operator back
1793 to GCC's enum tree_code representation. This function is the
1794 inverse of comparison_to_compcode. */
1795
1796 static enum tree_code
1797 compcode_to_comparison (int code)
1798 {
1799 switch (code)
1800 {
1801 case COMPCODE_LT:
1802 return LT_EXPR;
1803 case COMPCODE_EQ:
1804 return EQ_EXPR;
1805 case COMPCODE_LE:
1806 return LE_EXPR;
1807 case COMPCODE_GT:
1808 return GT_EXPR;
1809 case COMPCODE_NE:
1810 return NE_EXPR;
1811 case COMPCODE_GE:
1812 return GE_EXPR;
1813 default:
1814 abort ();
1815 }
1816 }
1817
1818 /* Return nonzero if CODE is a tree code that represents a truth value. */
1819
1820 static int
1821 truth_value_p (enum tree_code code)
1822 {
1823 return (TREE_CODE_CLASS (code) == '<'
1824 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
1825 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
1826 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
1827 }
1828 \f
/* Return nonzero if two operands are necessarily equal.
   If ONLY_CONST is nonzero, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.  */

int
operand_equal_p (tree arg0, tree arg1, int only_const)
{
  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  */
  if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
	 Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! only_const
      && (TREE_CODE (arg0) == SAVE_EXPR
	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  Constants that overflowed are never
     considered equal to anything.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
	return (! TREE_CONSTANT_OVERFLOW (arg0)
		&& ! TREE_CONSTANT_OVERFLOW (arg1)
		&& tree_int_cst_equal (arg0, arg1));

      case REAL_CST:
	return (! TREE_CONSTANT_OVERFLOW (arg0)
		&& ! TREE_CONSTANT_OVERFLOW (arg1)
		&& REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
					  TREE_REAL_CST (arg1)));

      case VECTOR_CST:
	{
	  tree v1, v2;

	  if (TREE_CONSTANT_OVERFLOW (arg0)
	      || TREE_CONSTANT_OVERFLOW (arg1))
	    return 0;

	  /* Compare the element chains pairwise.  */
	  v1 = TREE_VECTOR_CST_ELTS (arg0);
	  v2 = TREE_VECTOR_CST_ELTS (arg1);
	  while (v1 && v2)
	    {
	      if (!operand_equal_p (v1, v2, only_const))
		return 0;
	      v1 = TREE_CHAIN (v1);
	      v2 = TREE_CHAIN (v2);
	    }

	  return 1;
	}

      case COMPLEX_CST:
	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
				 only_const)
		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
				    only_const));

      case STRING_CST:
	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
		&& ! memcmp (TREE_STRING_POINTER (arg0),
			      TREE_STRING_POINTER (arg1),
			      TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
				0);
      default:
	break;
      }

  if (only_const)
    return 0;

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case '1':
      /* Two conversions are equal only if signedness and modes match.  */
      if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
	  && (TREE_UNSIGNED (TREE_TYPE (arg0))
	      != TREE_UNSIGNED (TREE_TYPE (arg1))))
	return 0;

      return operand_equal_p (TREE_OPERAND (arg0, 0),
			      TREE_OPERAND (arg1, 0), 0);

    case '<':
    case '2':
      if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
	  && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
			      0))
	return 1;

      /* For commutative ops, allow the other order.  */
      return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
	       || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
	       || TREE_CODE (arg0) == BIT_IOR_EXPR
	       || TREE_CODE (arg0) == BIT_XOR_EXPR
	       || TREE_CODE (arg0) == BIT_AND_EXPR
	       || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 0), 0));

    case 'r':
      /* If either of the pointer (or reference) expressions we are
	 dereferencing contain a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
	  || TREE_SIDE_EFFECTS (arg1))
	return 0;

      switch (TREE_CODE (arg0))
	{
	case INDIRECT_REF:
	  return operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), 0);

	case COMPONENT_REF:
	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 0), 0)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 1), 0));

	case BIT_FIELD_REF:
	  /* Object, size and position must all match.  */
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 0), 0)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 1), 0)
		  && operand_equal_p (TREE_OPERAND (arg0, 2),
				      TREE_OPERAND (arg1, 2), 0));
	default:
	  return 0;
	}

    case 'e':
      switch (TREE_CODE (arg0))
	{
	case ADDR_EXPR:
	case TRUTH_NOT_EXPR:
	  return operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), 0);

	case RTL_EXPR:
	  return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));

	case CALL_EXPR:
	  /* If the CALL_EXPRs call different functions, then they
	     clearly can not be equal.  */
	  if (! operand_equal_p (TREE_OPERAND (arg0, 0),
				 TREE_OPERAND (arg1, 0), 0))
	    return 0;

	  /* Only consider const functions equivalent.  */
	  if (TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR)
	    {
	      tree fndecl = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	      if (! (flags_from_decl_or_type (fndecl) & ECF_CONST))
		return 0;
	    }
	  else
	    return 0;

	  /* Now see if all the arguments are the same.  operand_equal_p
	     does not handle TREE_LIST, so we walk the operands here
	     feeding them to operand_equal_p.  */
	  arg0 = TREE_OPERAND (arg0, 1);
	  arg1 = TREE_OPERAND (arg1, 1);
	  while (arg0 && arg1)
	    {
	      if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1), 0))
		return 0;

	      arg0 = TREE_CHAIN (arg0);
	      arg1 = TREE_CHAIN (arg1);
	    }

	  /* If we get here and both argument lists are exhausted
	     then the CALL_EXPRs are equal.  */
	  return ! (arg0 || arg1);

	default:
	  return 0;
	}

    case 'd':
      /* Consider __builtin_sqrt equal to sqrt.  */
      return TREE_CODE (arg0) == FUNCTION_DECL
	     && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
	     && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
	     && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1);

    default:
      return 0;
    }
}
2052 \f
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  /* The shortening logic below only applies to integral operands.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
	 to match the longer operand.  */
      primarg1 = convert ((*lang_hooks.types.signed_or_unsigned_type)
			  (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, convert (type, primarg1), 0))
	return 1;
    }

  return 0;
}
2108 \f
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  char class = TREE_CODE_CLASS (code);

  /* We can handle some of the 'e' cases here by reclassifying them as
     unary or binary and recursing on their operands.  */
  if (class == 'e' && code == TRUTH_NOT_EXPR)
    class = '1';
  else if (class == 'e'
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
	       || code == COMPOUND_EXPR))
    class = '2';

  else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
	 too complex to handle.  */
      if (*cval1 || *cval2)
	return 0;

      class = '1';
      *save_p = 1;
    }

  switch (class)
    {
    case '1':
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case '2':
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
				      cval1, cval2, save_p));

    case 'c':
      /* A constant uses no variables at all.  */
      return 1;

    case 'e':
      /* All three arms of a COND_EXPR must qualify.  */
      if (code == COND_EXPR)
	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
				     cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
					cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
					cval1, cval2, save_p));
      return 0;

    case '<':
      /* First see if we can handle the first operand, then the second.  For
	 the second operand, we know *CVAL1 can't be zero.  It must be that
	 one side of the comparison is each of the values; test for the
	 case where this isn't true by failing if the two operands
	 are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
			   TREE_OPERAND (arg, 1), 0))
	return 0;

      /* Record operand 0 in *CVAL1 or *CVAL2 unless it matches one
	 already found; fail if both slots are taken by other values.  */
      if (*cval1 == 0)
	*cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
	;
      else
	return 0;

      /* Likewise for operand 1; *CVAL1 is known nonzero here.  */
      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
	;
      else
	return 0;

      return 1;

    default:
      return 0;
    }
}
2205 \f
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  The result is re-folded at each step.  */

static tree
eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  char class = TREE_CODE_CLASS (code);

  /* We can handle some of the 'e' cases here by reclassifying them as
     unary or binary (mirrors twoval_comparison_p above).  */
  if (class == 'e' && code == TRUTH_NOT_EXPR)
    class = '1';
  else if (class == 'e'
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    class = '2';

  switch (class)
    {
    case '1':
      /* Rebuild the unary node over the substituted operand.  */
      return fold (build1 (code, type,
			   eval_subst (TREE_OPERAND (arg, 0),
				       old0, new0, old1, new1)));

    case '2':
      /* Rebuild the binary node over both substituted operands.  */
      return fold (build (code, type,
			  eval_subst (TREE_OPERAND (arg, 0),
				      old0, new0, old1, new1),
			  eval_subst (TREE_OPERAND (arg, 1),
				      old0, new0, old1, new1)));

    case 'e':
      switch (code)
	{
	case SAVE_EXPR:
	  /* Look through the SAVE_EXPR wrapper.  */
	  return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);

	case COMPOUND_EXPR:
	  /* Only the value (second) operand matters for evaluation.  */
	  return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);

	case COND_EXPR:
	  return fold (build (code, type,
			      eval_subst (TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1),
			      eval_subst (TREE_OPERAND (arg, 1),
					  old0, new0, old1, new1),
			      eval_subst (TREE_OPERAND (arg, 2),
					  old0, new0, old1, new1)));
	default:
	  break;
	}
      /* Fall through - ??? */

    case '<':
      {
	tree arg0 = TREE_OPERAND (arg, 0);
	tree arg1 = TREE_OPERAND (arg, 1);

	/* We need to check both for exact equality and tree equality.  The
	   former will be true if the operand has a side-effect.  In that
	   case, we know the operand occurred exactly once.  */

	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
	  arg0 = new0;
	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
	  arg0 = new1;

	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
	  arg1 = new0;
	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
	  arg1 = new1;

	return fold (build (code, type, arg0, arg1));
      }

    default:
      /* Anything else is returned unchanged.  */
      return arg;
    }
}
2287 \f
2288 /* Return a tree for the case when the result of an expression is RESULT
2289 converted to TYPE and OMITTED was previously an operand of the expression
2290 but is now not needed (e.g., we folded OMITTED * 0).
2291
2292 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2293 the conversion of RESULT to TYPE. */
2294
2295 tree
2296 omit_one_operand (tree type, tree result, tree omitted)
2297 {
2298 tree t = convert (type, result);
2299
2300 if (TREE_SIDE_EFFECTS (omitted))
2301 return build (COMPOUND_EXPR, type, omitted, t);
2302
2303 return non_lvalue (t);
2304 }
2305
2306 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2307
2308 static tree
2309 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2310 {
2311 tree t = convert (type, result);
2312
2313 if (TREE_SIDE_EFFECTS (omitted))
2314 return build (COMPOUND_EXPR, type, omitted, t);
2315
2316 return pedantic_non_lvalue (t);
2317 }
2318 \f
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).  */

tree
invert_truthvalue (tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);

  if (code == ERROR_MARK)
    return arg;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == '<')
    {
      if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
	  && !flag_unsafe_math_optimizations
	  && code != NE_EXPR
	  && code != EQ_EXPR)
	return build1 (TRUTH_NOT_EXPR, type, arg);
      else
	return build (invert_tree_comparison (code), type,
		      TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
    }

  switch (code)
    {
    case INTEGER_CST:
      /* Nonzero becomes 0, zero becomes 1.  */
      return convert (type, build_int_2 (integer_zerop (arg), 0));

    case TRUTH_AND_EXPR:
      /* De Morgan: !(a & b) == !a | !b.  */
      return build (TRUTH_OR_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      /* De Morgan: !(a | b) == !a & !b.  */
      return build (TRUTH_AND_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
	 unless the second operand is a TRUTH_NOT_EXPR in which case our
	 result is the XOR of the first operand with the inside of the
	 negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
	return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
		      TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
	return build (TRUTH_XOR_EXPR, type,
		      invert_truthvalue (TREE_OPERAND (arg, 0)),
		      TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      /* De Morgan, preserving short-circuit evaluation order.  */
      return build (TRUTH_ORIF_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      return build (TRUTH_ANDIF_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      /* Double negation cancels.  */
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      /* Negate both arms; the condition is untouched.  */
      return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
		    invert_truthvalue (TREE_OPERAND (arg, 1)),
		    invert_truthvalue (TREE_OPERAND (arg, 2)));

    case COMPOUND_EXPR:
      /* Only the value operand carries the truth value.  */
      return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case WITH_RECORD_EXPR:
      return build (WITH_RECORD_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    TREE_OPERAND (arg, 1));

    case NON_LVALUE_EXPR:
      return invert_truthvalue (TREE_OPERAND (arg, 0));

    case NOP_EXPR:
    case CONVERT_EXPR:
    case FLOAT_EXPR:
      /* Push the negation through the conversion.  */
      return build1 (TREE_CODE (arg), type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      /* Only (x & 1) can be treated as a truth value here:
	 !(x & 1) == ((x & 1) == 0).  */
      if (!integer_onep (TREE_OPERAND (arg, 1)))
	break;
      return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));

    case SAVE_EXPR:
      /* Do not rewrite inside a SAVE_EXPR; wrap it instead.  */
      return build1 (TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      return build1 (CLEANUP_POINT_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)));

    default:
      break;
    }
  /* Anything else must already be boolean-typed to be safely wrapped.  */
  if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
    abort ();
  return build1 (TRUTH_NOT_EXPR, type, arg);
}
2432
2433 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2434 operands are another bit-wise operation with a common input. If so,
2435 distribute the bit operations to save an operation and possibly two if
2436 constants are involved. For example, convert
2437 (A | B) & (A | C) into A | (B & C)
2438 Further simplification will occur if B and C are constants.
2439
2440 If this optimization cannot be done, 0 will be returned. */
2441
2442 static tree
2443 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
2444 {
2445 tree common;
2446 tree left, right;
2447
2448 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2449 || TREE_CODE (arg0) == code
2450 || (TREE_CODE (arg0) != BIT_AND_EXPR
2451 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2452 return 0;
2453
2454 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2455 {
2456 common = TREE_OPERAND (arg0, 0);
2457 left = TREE_OPERAND (arg0, 1);
2458 right = TREE_OPERAND (arg1, 1);
2459 }
2460 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2461 {
2462 common = TREE_OPERAND (arg0, 0);
2463 left = TREE_OPERAND (arg0, 1);
2464 right = TREE_OPERAND (arg1, 0);
2465 }
2466 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2467 {
2468 common = TREE_OPERAND (arg0, 1);
2469 left = TREE_OPERAND (arg0, 0);
2470 right = TREE_OPERAND (arg1, 1);
2471 }
2472 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2473 {
2474 common = TREE_OPERAND (arg0, 1);
2475 left = TREE_OPERAND (arg0, 0);
2476 right = TREE_OPERAND (arg1, 0);
2477 }
2478 else
2479 return 0;
2480
2481 return fold (build (TREE_CODE (arg0), type, common,
2482 fold (build (code, type, left, right))));
2483 }
2484 \f
2485 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2486 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
2487
2488 static tree
2489 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
2490 int unsignedp)
2491 {
2492 tree result = build (BIT_FIELD_REF, type, inner,
2493 size_int (bitsize), bitsize_int (bitpos));
2494
2495 TREE_UNSIGNED (result) = unsignedp;
2496
2497 return result;
2498 }
2499
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (enum tree_code code, tree compare_type,
			    tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
				&lunsignedp, &lvolatilep);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

 if (!const_p)
   {
     /* If this is not a constant, we can only do something if bit positions,
	sizes, and signedness are the same.  */
     rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
				   &runsignedp, &rvolatilep);

     if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
	 || lunsignedp != runsignedp || offset != 0
	 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
       return 0;
   }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos,
			 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
			 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
				TYPE_ALIGN (TREE_TYPE (rinner))),
			 word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = (*lang_hooks.types.type_for_mode) (nmode, 0);
  unsigned_type = (*lang_hooks.types.type_for_mode) (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field: LBITSIZE ones
     positioned at LBITPOS within the NBITSIZE-wide chunk.  */
  mask = build_int_2 (~0, ~0);
  TREE_TYPE (mask) = unsigned_type;
  force_fit_type (mask, 0);
  mask = convert (unsigned_type, mask);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask,
		      size_int (nbitsize - lbitsize - lbitpos), 0);

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return build (code, compare_type,
		  build (BIT_AND_EXPR, unsigned_type,
			 make_bit_field_ref (linner, unsigned_type,
					     nbitsize, nbitpos, 1),
			 mask),
		  build (BIT_AND_EXPR, unsigned_type,
			 make_bit_field_ref (rinner, unsigned_type,
					     nbitsize, nbitpos, 1),
			 mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
					convert (unsigned_type, rhs),
					size_int (lbitsize), 0)))
	{
	  warning ("comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return convert (compare_type,
			  (code == NE_EXPR
			   ? integer_one_node : integer_zero_node));
	}
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
			      size_int (lbitsize - 1), 0);
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
	{
	  warning ("comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return convert (compare_type,
			  (code == NE_EXPR
			   ? integer_one_node : integer_zero_node));
	}
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = convert (type, integer_zero_node);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = fold (const_binop (BIT_AND_EXPR,
			   const_binop (LSHIFT_EXPR,
					convert (unsigned_type, rhs),
					size_int (lbitpos), 0),
			   mask, 0));

  return build (code, compare_type,
		build (BIT_AND_EXPR, unsigned_type, lhs, mask),
		rhs);
}
2671 \f
/* Subroutine for fold_truthop: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
			HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
			int *punsignedp, int *pvolatilep,
			tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (TREE_CODE (exp) == NOP_EXPR
      || TREE_CODE (exp) == CONVERT_EXPR
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  /* Peel off an explicit constant mask, remembering it for later.  */
  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
	return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
			       punsignedp, pvolatilep);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == tree_low_cst (TYPE_SIZE (outer_type), 1))
    *punsignedp = TREE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield: *PBITSIZE low-order ones.  */
  unsigned_type = (*lang_hooks.types.type_for_size) (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_2 (~0, ~0);
  TREE_TYPE (mask) = unsigned_type;
  force_fit_type (mask, 0);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold (build (BIT_AND_EXPR, unsigned_type,
			convert (unsigned_type, and_mask), mask));

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
2763
2764 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
2765 bit positions. */
2766
2767 static int
2768 all_ones_mask_p (tree mask, int size)
2769 {
2770 tree type = TREE_TYPE (mask);
2771 unsigned int precision = TYPE_PRECISION (type);
2772 tree tmask;
2773
2774 tmask = build_int_2 (~0, ~0);
2775 TREE_TYPE (tmask) = (*lang_hooks.types.signed_type) (type);
2776 force_fit_type (tmask, 0);
2777 return
2778 tree_int_cst_equal (mask,
2779 const_binop (RSHIFT_EXPR,
2780 const_binop (LSHIFT_EXPR, tmask,
2781 size_int (precision - size),
2782 0),
2783 size_int (precision - size), 0));
2784 }
2785
/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, tree val)
{
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (val))
    return NULL_TREE;

  /* Compute the sign bit (HI:LO) and the in-range mask (MASK_HI:MASK_LO)
     as a two-word value, depending on whether the precision fits in one
     HOST_WIDE_INT or spills into the high word.  */
  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = ((unsigned HOST_WIDE_INT) -1
		 >> (2 * HOST_BITS_PER_WIDE_INT - width));
      mask_lo = -1;
    }
  else
    {
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = ((unsigned HOST_WIDE_INT) -1
		 >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
2843
2844 /* Subroutine for fold_truthop: determine if an operand is simple enough
2845 to be evaluated unconditionally. */
2846
2847 static int
2848 simple_operand_p (tree exp)
2849 {
2850 /* Strip any conversions that don't change the machine mode. */
2851 while ((TREE_CODE (exp) == NOP_EXPR
2852 || TREE_CODE (exp) == CONVERT_EXPR)
2853 && (TYPE_MODE (TREE_TYPE (exp))
2854 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
2855 exp = TREE_OPERAND (exp, 0);
2856
2857 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
2858 || (DECL_P (exp)
2859 && ! TREE_ADDRESSABLE (exp)
2860 && ! TREE_THIS_VOLATILE (exp)
2861 && ! DECL_NONLOCAL (exp)
2862 /* Don't regard global variables as simple. They may be
2863 allocated in ways unknown to the compiler (shared memory,
2864 #pragma weak, etc). */
2865 && ! TREE_PUBLIC (exp)
2866 && ! DECL_EXTERNAL (exp)
2867 /* Loading a static variable is unduly expensive, but global
2868 registers aren't expensive. */
2869 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
2870 }
2871 \f
2872 /* The following functions are subroutines to fold_range_test and allow it to
2873 try to change a logical combination of comparisons into a range test.
2874
2875 For example, both
2876 X == 2 || X == 3 || X == 4 || X == 5
2877 and
2878 X >= 2 && X <= 5
2879 are converted to
2880 (unsigned) (X - 2) <= 3
2881
2882 We describe each set of comparisons as being either inside or outside
2883 a range, using a variable named like IN_P, and then describe the
2884 range with a lower and upper bound. If one of the bounds is omitted,
2885 it represents either the highest or lowest value of the type.
2886
2887 In the comments below, we represent a range by two numbers in brackets
2888 preceded by a "+" to designate being inside that range, or a "-" to
2889 designate being outside that range, so the condition can be inverted by
2890 flipping the prefix. An omitted bound is represented by a "-". For
2891 example, "- [-, 10]" means being outside the range starting at the lowest
2892 possible value and ending at 10, in other words, being greater than 10.
2893 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
2894 always false.
2895
2896 We set up things so that the missing bounds are handled in a consistent
2897 manner so neither a missing bound nor "true" and "false" need to be
2898 handled using a special case. */
2899
2900 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
2901 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
2902 and UPPER1_P are nonzero if the respective argument is an upper bound
2903 and zero for a lower. TYPE, if nonzero, is the type of the result; it
2904 must be specified for a comparison. ARG1 will be converted to ARG0's
2905 type if both are specified. */
2906
2907 static tree
2908 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
2909 tree arg1, int upper1_p)
2910 {
2911 tree tem;
2912 int result;
2913 int sgn0, sgn1;
2914
2915 /* If neither arg represents infinity, do the normal operation.
2916 Else, if not a comparison, return infinity. Else handle the special
2917 comparison rules. Note that most of the cases below won't occur, but
2918 are handled for consistency. */
2919
2920 if (arg0 != 0 && arg1 != 0)
2921 {
2922 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
2923 arg0, convert (TREE_TYPE (arg0), arg1)));
2924 STRIP_NOPS (tem);
2925 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
2926 }
2927
2928 if (TREE_CODE_CLASS (code) != '<')
2929 return 0;
2930
2931 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
2932 for neither. In real maths, we cannot assume open ended ranges are
2933 the same. But, this is computer arithmetic, where numbers are finite.
2934 We can therefore make the transformation of any unbounded range with
2935 the value Z, Z being greater than any representable number. This permits
2936 us to treat unbounded ranges as equal. */
2937 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
2938 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
2939 switch (code)
2940 {
2941 case EQ_EXPR:
2942 result = sgn0 == sgn1;
2943 break;
2944 case NE_EXPR:
2945 result = sgn0 != sgn1;
2946 break;
2947 case LT_EXPR:
2948 result = sgn0 < sgn1;
2949 break;
2950 case LE_EXPR:
2951 result = sgn0 <= sgn1;
2952 break;
2953 case GT_EXPR:
2954 result = sgn0 > sgn1;
2955 break;
2956 case GE_EXPR:
2957 result = sgn0 >= sgn1;
2958 break;
2959 default:
2960 abort ();
2961 }
2962
2963 return convert (type, result ? integer_one_node : integer_zero_node);
2964 }
2965 \f
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same type
   as the returned expression.  If EXP is not a comparison, we will most
   likely not be returning a useful value and range.

   A range is represented as the triple (*PIN_P, *PLOW, *PHIGH): the test
   is "value in [*PLOW, *PHIGH]" when *PIN_P is nonzero, "value outside
   [*PLOW, *PHIGH]" when it is zero.  A null *PLOW or *PHIGH means the
   corresponding bound is absent (unbounded on that side).  */

static tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
{
  enum tree_code code;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
  /* ORIG_TYPE remembers the first operand type we saw, so that a
     narrowing/widening conversion further down cannot be lost.  */
  tree orig_type = NULL_TREE;
  int in_p, n_in_p;
  tree low, high, n_low, n_high;

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0, low = high = convert (TREE_TYPE (exp), integer_zero_node);

  while (1)
    {
      code = TREE_CODE (exp);

      /* Pick up the operands of EXP (when it is an expression node) so
	 the cases below can refer to ARG0/ARG1 and the operand TYPE.  */
      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  if (first_rtl_op (code) > 0)
	    arg0 = TREE_OPERAND (exp, 0);
	  if (TREE_CODE_CLASS (code) == '<'
	      || TREE_CODE_CLASS (code) == '1'
	      || TREE_CODE_CLASS (code) == '2')
	    type = TREE_TYPE (arg0);
	  if (TREE_CODE_CLASS (code) == '2'
	      || TREE_CODE_CLASS (code) == '<'
	      || (TREE_CODE_CLASS (code) == 'e'
		  && TREE_CODE_LENGTH (code) > 1))
	    arg1 = TREE_OPERAND (exp, 1);
	}

      /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
	 lose a cast by accident.  */
      if (type != NULL_TREE && orig_type == NULL_TREE)
	orig_type = type;

      switch (code)
	{
	case TRUTH_NOT_EXPR:
	  /* Logical negation flips in/out and descends into the operand.  */
	  in_p = ! in_p, exp = arg0;
	  continue;

	case EQ_EXPR: case NE_EXPR:
	case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
	  /* We can only do something if the range is testing for zero
	     and if the second operand is an integer constant.  Note that
	     saying something is "in" the range we make is done by
	     complementing IN_P since it will set in the initial case of
	     being not equal to zero; "out" is leaving it alone.  */
	  if (low == 0 || high == 0
	      || ! integer_zerop (low) || ! integer_zerop (high)
	      || TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  switch (code)
	    {
	    case NE_EXPR:  /* - [c, c]  */
	      low = high = arg1;
	      break;
	    case EQ_EXPR:  /* + [c, c]  */
	      in_p = ! in_p, low = high = arg1;
	      break;
	    case GT_EXPR:  /* - [-, c] */
	      low = 0, high = arg1;
	      break;
	    case GE_EXPR:  /* + [c, -] */
	      in_p = ! in_p, low = arg1, high = 0;
	      break;
	    case LT_EXPR:  /* - [c, -] */
	      low = arg1, high = 0;
	      break;
	    case LE_EXPR:  /* + [-, c] */
	      in_p = ! in_p, low = 0, high = arg1;
	      break;
	    default:
	      abort ();
	    }

	  exp = arg0;

	  /* If this is an unsigned comparison, we also know that EXP is
	     greater than or equal to zero.  We base the range tests we make
	     on that fact, so we record it here so we can parse existing
	     range tests.  */
	  if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
	    {
	      /* Intersect with [0, -] to fold in the implicit lower bound.  */
	      if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
				  1, convert (type, integer_zero_node),
				  NULL_TREE))
		break;

	      in_p = n_in_p, low = n_low, high = n_high;

	      /* If the high bound is missing, but we
		 have a low bound, reverse the range so
		 it goes from zero to the low bound minus 1.  */
	      if (high == 0 && low)
		{
		  in_p = ! in_p;
		  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
				      integer_one_node, 0);
		  low = convert (type, integer_zero_node);
		}
	    }
	  continue;

	case NEGATE_EXPR:
	  /* (-x) IN [a,b] -> x in [-b, -a]  */
	  n_low = range_binop (MINUS_EXPR, type,
			       convert (type, integer_zero_node), 0, high, 1);
	  n_high = range_binop (MINUS_EXPR, type,
				convert (type, integer_zero_node), 0, low, 0);
	  low = n_low, high = n_high;
	  exp = arg0;
	  continue;

	case BIT_NOT_EXPR:
	  /* ~ X -> -X - 1; rewrite and let the MINUS/NEGATE cases above
	     handle the range transformation on the next iteration.  */
	  exp = build (MINUS_EXPR, type, negate_expr (arg0),
		       convert (type, integer_one_node));
	  continue;

	case PLUS_EXPR:  case MINUS_EXPR:
	  if (TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  /* If EXP is signed, any overflow in the computation is undefined,
	     so we don't worry about it so long as our computations on
	     the bounds don't overflow.  For unsigned, overflow is defined
	     and this is exactly the right thing.  */
	  n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			       type, low, 0, arg1, 0);
	  n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			       type, high, 1, arg1, 0);
	  if ((n_low != 0 && TREE_OVERFLOW (n_low))
	      || (n_high != 0 && TREE_OVERFLOW (n_high)))
	    break;

	  /* Check for an unsigned range which has wrapped around the maximum
	     value thus making n_high < n_low, and normalize it.  */
	  if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
	    {
	      low = range_binop (PLUS_EXPR, type, n_high, 0,
				 integer_one_node, 0);
	      high = range_binop (MINUS_EXPR, type, n_low, 0,
				  integer_one_node, 0);

	      /* If the range is of the form +/- [ x+1, x ], we won't
		 be able to normalize it.  But then, it represents the
		 whole range or the empty set, so make it
		 +/- [ -, - ].  */
	      if (tree_int_cst_equal (n_low, low)
		  && tree_int_cst_equal (n_high, high))
		low = high = 0;
	      else
		in_p = ! in_p;
	    }
	  else
	    low = n_low, high = n_high;

	  exp = arg0;
	  continue;

	case NOP_EXPR:  case NON_LVALUE_EXPR:  case CONVERT_EXPR:
	  /* Refuse to look through a widening conversion: the bounds
	     would not describe the wider operand.  */
	  if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
	    break;

	  if (! INTEGRAL_TYPE_P (type)
	      || (low != 0 && ! int_fits_type_p (low, type))
	      || (high != 0 && ! int_fits_type_p (high, type)))
	    break;

	  n_low = low, n_high = high;

	  if (n_low != 0)
	    n_low = convert (type, n_low);

	  if (n_high != 0)
	    n_high = convert (type, n_high);

	  /* If we're converting from an unsigned to a signed type,
	     we will be doing the comparison as unsigned.  The tests above
	     have already verified that LOW and HIGH are both positive.

	     So we have to make sure that the original unsigned value will
	     be interpreted as positive.  */
	  if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
	    {
	      tree equiv_type = (*lang_hooks.types.type_for_mode)
		(TYPE_MODE (type), 1);
	      tree high_positive;

	      /* A range without an upper bound is, naturally, unbounded.
		 Since convert would have cropped a very large value, use
		 the max value for the destination type.  */
	      high_positive
		= TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
		  : TYPE_MAX_VALUE (type);

	      /* When precisions match, the largest positive signed value
		 is the unsigned maximum shifted right by one.  */
	      if (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (exp)))
		high_positive = fold (build (RSHIFT_EXPR, type,
					     convert (type, high_positive),
					     convert (type, integer_one_node)));

	      /* If the low bound is specified, "and" the range with the
		 range for which the original unsigned value will be
		 positive.  */
	      if (low != 0)
		{
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      1, n_low, n_high,
				      1, convert (type, integer_zero_node),
				      high_positive))
		    break;

		  in_p = (n_in_p == in_p);
		}
	      else
		{
		  /* Otherwise, "or" the range with the range of the input
		     that will be interpreted as negative.  */
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      0, n_low, n_high,
				      1, convert (type, integer_zero_node),
				      high_positive))
		    break;

		  in_p = (in_p != n_in_p);
		}
	    }

	  exp = arg0;
	  low = n_low, high = n_high;
	  continue;

	default:
	  break;
	}

      /* Nothing in the switch refined the range; stop iterating.  */
      break;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
						 exp, 0, low, 0))
		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
						    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
3233 \f
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  A null LOW or HIGH means the corresponding bound is
   absent.  Returns 0 if no suitable test expression could be built.

   The cases below are tried in order; each one assumes the earlier,
   simpler forms did not apply.  */

static tree
build_range_check (tree type, tree exp, int in_p, tree low, tree high)
{
  tree etype = TREE_TYPE (exp);
  tree value;

  /* An "out of range" test is the inversion of the "in range" test.  */
  if (! in_p
      && (0 != (value = build_range_check (type, exp, 1, low, high))))
    return invert_truthvalue (value);

  /* No bounds at all: the test is trivially true.  */
  if (low == 0 && high == 0)
    return convert (type, integer_one_node);

  /* Only an upper bound: EXP <= HIGH.  */
  if (low == 0)
    return fold (build (LE_EXPR, type, exp, high));

  /* Only a lower bound: EXP >= LOW.  */
  if (high == 0)
    return fold (build (GE_EXPR, type, exp, low));

  /* A singleton range: EXP == LOW.  */
  if (operand_equal_p (low, high, 0))
    return fold (build (EQ_EXPR, type, exp, low));

  /* [0, HIGH]: do the comparison unsigned so the single check EXP <= HIGH
     also rules out negative values.  */
  if (integer_zerop (low))
    {
      if (! TREE_UNSIGNED (etype))
	{
	  etype = (*lang_hooks.types.unsigned_type) (etype);
	  high = convert (etype, high);
	  exp = convert (etype, exp);
	}
      return build_range_check (type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      /* Compute the two-word representation of the maximum signed value
	 of ETYPE's precision, i.e. 2**(prec-1) - 1.  */
      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
	{
	  hi = 0;
	  lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
	}
      else
	{
	  hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
	  lo = (unsigned HOST_WIDE_INT) -1;
	}

      /* If HIGH is exactly that maximum, [1, HIGH] is "signed EXP > 0".  */
      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
	{
	  if (TREE_UNSIGNED (etype))
	    {
	      etype = (*lang_hooks.types.signed_type) (etype);
	      exp = convert (etype, exp);
	    }
	  return fold (build (GT_EXPR, type, exp,
			      convert (etype, integer_zero_node)));
	}
    }

  /* General case: shift the range down by LOW so it starts at zero, then
     retry as a [0, HIGH-LOW] check, provided HIGH-LOW folds cleanly.  */
  if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
      && ! TREE_OVERFLOW (value))
    return build_range_check (type,
			      fold (build (MINUS_EXPR, etype, exp, low)),
			      1, convert (etype, integer_zero_node), value);

  return 0;
}
3310 \f
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.

   Each input range i is the triple (INi_P, LOWi, HIGHi): "in [LOWi, HIGHi]"
   when INi_P is nonzero, "out of [LOWi, HIGHi]" otherwise; a null bound
   means unbounded on that side.  The merged range is stored through
   PIN_P, PLOW and PHIGH in the same representation.  */

static int
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
	      tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  /* Whether the two ranges share the same lower / upper bound (a pair of
     null bounds counts as equal).  */
  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
				 low0, 0, low1, 0))
      || (lowequal
	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the first range to the end of the second.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)
	{
	  in_p = 1, high = high0;
	  low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
			     integer_one_node, 0);
	}
      else if (! subset || highequal)
	{
	  in_p = 1, low = low0;
	  high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
			      integer_one_node, 0);
	}
      else
	return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
      if (no_overlap)
	in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
	in_p = 0, low = high = 0;
      else
	{
	  in_p = 1, high = high1;
	  low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
			     integer_one_node, 0);
	}
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of the
	 second.  */
      if (no_overlap)
	{
	  /* Adjacency test: HIGH0 + 1 == LOW1.  */
	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_binop (PLUS_EXPR, NULL_TREE,
						      high0, 1,
						      integer_one_node, 1),
					 1, low1, 0)))
	    in_p = 0, low = low0, high = high1;
	  else
	    return 0;
	}
      else if (subset)
	in_p = 0, low = low0, high = high0;
      else
	in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
3443 \f
3444 #ifndef RANGE_TEST_NON_SHORT_CIRCUIT
3445 #define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
3446 #endif
3447
3448 /* EXP is some logical combination of boolean tests. See if we can
3449 merge it into some range test. Return the new tree if so. */
3450
3451 static tree
3452 fold_range_test (tree exp)
3453 {
3454 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3455 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3456 int in0_p, in1_p, in_p;
3457 tree low0, low1, low, high0, high1, high;
3458 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3459 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3460 tree tem;
3461
3462 /* If this is an OR operation, invert both sides; we will invert
3463 again at the end. */
3464 if (or_op)
3465 in0_p = ! in0_p, in1_p = ! in1_p;
3466
3467 /* If both expressions are the same, if we can merge the ranges, and we
3468 can build the range test, return it or it inverted. If one of the
3469 ranges is always true or always false, consider it to be the same
3470 expression as the other. */
3471 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3472 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3473 in1_p, low1, high1)
3474 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3475 lhs != 0 ? lhs
3476 : rhs != 0 ? rhs : integer_zero_node,
3477 in_p, low, high))))
3478 return or_op ? invert_truthvalue (tem) : tem;
3479
3480 /* On machines where the branch cost is expensive, if this is a
3481 short-circuited branch and the underlying object on both sides
3482 is the same, make a non-short-circuit operation. */
3483 else if (RANGE_TEST_NON_SHORT_CIRCUIT
3484 && lhs != 0 && rhs != 0
3485 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3486 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3487 && operand_equal_p (lhs, rhs, 0))
3488 {
3489 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3490 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
3491 which cases we can't do this. */
3492 if (simple_operand_p (lhs))
3493 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3494 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3495 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3496 TREE_OPERAND (exp, 1));
3497
3498 else if ((*lang_hooks.decls.global_bindings_p) () == 0
3499 && ! CONTAINS_PLACEHOLDER_P (lhs))
3500 {
3501 tree common = save_expr (lhs);
3502
3503 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3504 or_op ? ! in0_p : in0_p,
3505 low0, high0))
3506 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3507 or_op ? ! in1_p : in1_p,
3508 low1, high1))))
3509 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3510 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3511 TREE_TYPE (exp), lhs, rhs);
3512 }
3513 }
3514
3515 return 0;
3516 }
3517 \f
3518 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3519 bit value. Arrange things so the extra bits will be set to zero if and
3520 only if C is signed-extended to its full width. If MASK is nonzero,
3521 it is an INTEGER_CST that should be AND'ed with the extra bits. */
3522
3523 static tree
3524 unextend (tree c, int p, int unsignedp, tree mask)
3525 {
3526 tree type = TREE_TYPE (c);
3527 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3528 tree temp;
3529
3530 if (p == modesize || unsignedp)
3531 return c;
3532
3533 /* We work by getting just the sign bit into the low-order bit, then
3534 into the high-order bit, then sign-extend. We then XOR that value
3535 with C. */
3536 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3537 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3538
3539 /* We must use a signed type in order to get an arithmetic right shift.
3540 However, we must also avoid introducing accidental overflows, so that
3541 a subsequent call to integer_zerop will work. Hence we must
3542 do the type conversion here. At this point, the constant is either
3543 zero or one, and the conversion to a signed type can never overflow.
3544 We could get an overflow if this conversion is done anywhere else. */
3545 if (TREE_UNSIGNED (type))
3546 temp = convert ((*lang_hooks.types.signed_type) (type), temp);
3547
3548 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3549 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3550 if (mask != 0)
3551 temp = const_binop (BIT_AND_EXPR, temp, convert (TREE_TYPE (c), mask), 0);
3552 /* If necessary, convert the type back to match the type of C. */
3553 if (TREE_UNSIGNED (type))
3554 temp = convert (type, temp);
3555
3556 return convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
3557 }
3558 \f
3559 /* Find ways of folding logical expressions of LHS and RHS:
3560 Try to merge two comparisons to the same innermost item.
3561 Look for range tests like "ch >= '0' && ch <= '9'".
3562 Look for combinations of simple terms on machines with expensive branches
3563 and evaluate the RHS unconditionally.
3564
3565 For example, if we have p->a == 2 && p->b == 4 and we can make an
3566 object large enough to span both A and B, we can do this with a comparison
3567 against the object ANDed with the a mask.
3568
3569 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3570 operations to do this with one comparison.
3571
3572 We check for both normal comparisons and the BIT_AND_EXPRs made this by
3573 function and the one above.
3574
3575 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3576 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3577
3578 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
3579 two operands.
3580
3581 We return the simplified tree or 0 if no optimization is possible. */
3582
3583 static tree
3584 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
3585 {
3586 /* If this is the "or" of two comparisons, we can do something if
3587 the comparisons are NE_EXPR. If this is the "and", we can do something
3588 if the comparisons are EQ_EXPR. I.e.,
3589 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3590
3591 WANTED_CODE is this operation code. For single bit fields, we can
3592 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3593 comparison for one-bit fields. */
3594
3595 enum tree_code wanted_code;
3596 enum tree_code lcode, rcode;
3597 tree ll_arg, lr_arg, rl_arg, rr_arg;
3598 tree ll_inner, lr_inner, rl_inner, rr_inner;
3599 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3600 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3601 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3602 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3603 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3604 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3605 enum machine_mode lnmode, rnmode;
3606 tree ll_mask, lr_mask, rl_mask, rr_mask;
3607 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3608 tree l_const, r_const;
3609 tree lntype, rntype, result;
3610 int first_bit, end_bit;
3611 int volatilep;
3612
3613 /* Start by getting the comparison codes. Fail if anything is volatile.
3614 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3615 it were surrounded with a NE_EXPR. */
3616
3617 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3618 return 0;
3619
3620 lcode = TREE_CODE (lhs);
3621 rcode = TREE_CODE (rhs);
3622
3623 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3624 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3625
3626 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3627 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3628
3629 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3630 return 0;
3631
3632 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3633 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3634
3635 ll_arg = TREE_OPERAND (lhs, 0);
3636 lr_arg = TREE_OPERAND (lhs, 1);
3637 rl_arg = TREE_OPERAND (rhs, 0);
3638 rr_arg = TREE_OPERAND (rhs, 1);
3639
3640 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
3641 if (simple_operand_p (ll_arg)
3642 && simple_operand_p (lr_arg)
3643 && !FLOAT_TYPE_P (TREE_TYPE (ll_arg)))
3644 {
3645 int compcode;
3646
3647 if (operand_equal_p (ll_arg, rl_arg, 0)
3648 && operand_equal_p (lr_arg, rr_arg, 0))
3649 {
3650 int lcompcode, rcompcode;
3651
3652 lcompcode = comparison_to_compcode (lcode);
3653 rcompcode = comparison_to_compcode (rcode);
3654 compcode = (code == TRUTH_AND_EXPR)
3655 ? lcompcode & rcompcode
3656 : lcompcode | rcompcode;
3657 }
3658 else if (operand_equal_p (ll_arg, rr_arg, 0)
3659 && operand_equal_p (lr_arg, rl_arg, 0))
3660 {
3661 int lcompcode, rcompcode;
3662
3663 rcode = swap_tree_comparison (rcode);
3664 lcompcode = comparison_to_compcode (lcode);
3665 rcompcode = comparison_to_compcode (rcode);
3666 compcode = (code == TRUTH_AND_EXPR)
3667 ? lcompcode & rcompcode
3668 : lcompcode | rcompcode;
3669 }
3670 else
3671 compcode = -1;
3672
3673 if (compcode == COMPCODE_TRUE)
3674 return convert (truth_type, integer_one_node);
3675 else if (compcode == COMPCODE_FALSE)
3676 return convert (truth_type, integer_zero_node);
3677 else if (compcode != -1)
3678 return build (compcode_to_comparison (compcode),
3679 truth_type, ll_arg, lr_arg);
3680 }
3681
3682 /* If the RHS can be evaluated unconditionally and its operands are
3683 simple, it wins to evaluate the RHS unconditionally on machines
3684 with expensive branches. In this case, this isn't a comparison
3685 that can be merged. Avoid doing this if the RHS is a floating-point
3686 comparison since those can trap. */
3687
3688 if (BRANCH_COST >= 2
3689 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
3690 && simple_operand_p (rl_arg)
3691 && simple_operand_p (rr_arg))
3692 {
3693 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
3694 if (code == TRUTH_OR_EXPR
3695 && lcode == NE_EXPR && integer_zerop (lr_arg)
3696 && rcode == NE_EXPR && integer_zerop (rr_arg)
3697 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3698 return build (NE_EXPR, truth_type,
3699 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3700 ll_arg, rl_arg),
3701 integer_zero_node);
3702
3703 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
3704 if (code == TRUTH_AND_EXPR
3705 && lcode == EQ_EXPR && integer_zerop (lr_arg)
3706 && rcode == EQ_EXPR && integer_zerop (rr_arg)
3707 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3708 return build (EQ_EXPR, truth_type,
3709 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3710 ll_arg, rl_arg),
3711 integer_zero_node);
3712
3713 return build (code, truth_type, lhs, rhs);
3714 }
3715
3716 /* See if the comparisons can be merged. Then get all the parameters for
3717 each side. */
3718
3719 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
3720 || (rcode != EQ_EXPR && rcode != NE_EXPR))
3721 return 0;
3722
3723 volatilep = 0;
3724 ll_inner = decode_field_reference (ll_arg,
3725 &ll_bitsize, &ll_bitpos, &ll_mode,
3726 &ll_unsignedp, &volatilep, &ll_mask,
3727 &ll_and_mask);
3728 lr_inner = decode_field_reference (lr_arg,
3729 &lr_bitsize, &lr_bitpos, &lr_mode,
3730 &lr_unsignedp, &volatilep, &lr_mask,
3731 &lr_and_mask);
3732 rl_inner = decode_field_reference (rl_arg,
3733 &rl_bitsize, &rl_bitpos, &rl_mode,
3734 &rl_unsignedp, &volatilep, &rl_mask,
3735 &rl_and_mask);
3736 rr_inner = decode_field_reference (rr_arg,
3737 &rr_bitsize, &rr_bitpos, &rr_mode,
3738 &rr_unsignedp, &volatilep, &rr_mask,
3739 &rr_and_mask);
3740
3741 /* It must be true that the inner operation on the lhs of each
3742 comparison must be the same if we are to be able to do anything.
3743 Then see if we have constants. If not, the same must be true for
3744 the rhs's. */
3745 if (volatilep || ll_inner == 0 || rl_inner == 0
3746 || ! operand_equal_p (ll_inner, rl_inner, 0))
3747 return 0;
3748
3749 if (TREE_CODE (lr_arg) == INTEGER_CST
3750 && TREE_CODE (rr_arg) == INTEGER_CST)
3751 l_const = lr_arg, r_const = rr_arg;
3752 else if (lr_inner == 0 || rr_inner == 0
3753 || ! operand_equal_p (lr_inner, rr_inner, 0))
3754 return 0;
3755 else
3756 l_const = r_const = 0;
3757
3758 /* If either comparison code is not correct for our logical operation,
3759 fail. However, we can convert a one-bit comparison against zero into
3760 the opposite comparison against that bit being set in the field. */
3761
3762 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
3763 if (lcode != wanted_code)
3764 {
3765 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
3766 {
3767 /* Make the left operand unsigned, since we are only interested
3768 in the value of one bit. Otherwise we are doing the wrong
3769 thing below. */
3770 ll_unsignedp = 1;
3771 l_const = ll_mask;
3772 }
3773 else
3774 return 0;
3775 }
3776
3777 /* This is analogous to the code for l_const above. */
3778 if (rcode != wanted_code)
3779 {
3780 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
3781 {
3782 rl_unsignedp = 1;
3783 r_const = rl_mask;
3784 }
3785 else
3786 return 0;
3787 }
3788
3789 /* After this point all optimizations will generate bit-field
3790 references, which we might not want. */
3791 if (! (*lang_hooks.can_use_bit_fields_p) ())
3792 return 0;
3793
3794 /* See if we can find a mode that contains both fields being compared on
3795 the left. If we can't, fail. Otherwise, update all constants and masks
3796 to be relative to a field of that size. */
3797 first_bit = MIN (ll_bitpos, rl_bitpos);
3798 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
3799 lnmode = get_best_mode (end_bit - first_bit, first_bit,
3800 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
3801 volatilep);
3802 if (lnmode == VOIDmode)
3803 return 0;
3804
3805 lnbitsize = GET_MODE_BITSIZE (lnmode);
3806 lnbitpos = first_bit & ~ (lnbitsize - 1);
3807 lntype = (*lang_hooks.types.type_for_size) (lnbitsize, 1);
3808 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
3809
3810 if (BYTES_BIG_ENDIAN)
3811 {
3812 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
3813 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
3814 }
3815
3816 ll_mask = const_binop (LSHIFT_EXPR, convert (lntype, ll_mask),
3817 size_int (xll_bitpos), 0);
3818 rl_mask = const_binop (LSHIFT_EXPR, convert (lntype, rl_mask),
3819 size_int (xrl_bitpos), 0);
3820
3821 if (l_const)
3822 {
3823 l_const = convert (lntype, l_const);
3824 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
3825 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
3826 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
3827 fold (build1 (BIT_NOT_EXPR,
3828 lntype, ll_mask)),
3829 0)))
3830 {
3831 warning ("comparison is always %d", wanted_code == NE_EXPR);
3832
3833 return convert (truth_type,
3834 wanted_code == NE_EXPR
3835 ? integer_one_node : integer_zero_node);
3836 }
3837 }
3838 if (r_const)
3839 {
3840 r_const = convert (lntype, r_const);
3841 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
3842 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
3843 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
3844 fold (build1 (BIT_NOT_EXPR,
3845 lntype, rl_mask)),
3846 0)))
3847 {
3848 warning ("comparison is always %d", wanted_code == NE_EXPR);
3849
3850 return convert (truth_type,
3851 wanted_code == NE_EXPR
3852 ? integer_one_node : integer_zero_node);
3853 }
3854 }
3855
3856 /* If the right sides are not constant, do the same for it. Also,
3857 disallow this optimization if a size or signedness mismatch occurs
3858 between the left and right sides. */
3859 if (l_const == 0)
3860 {
3861 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
3862 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
3863 /* Make sure the two fields on the right
3864 correspond to the left without being swapped. */
3865 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
3866 return 0;
3867
3868 first_bit = MIN (lr_bitpos, rr_bitpos);
3869 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
3870 rnmode = get_best_mode (end_bit - first_bit, first_bit,
3871 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
3872 volatilep);
3873 if (rnmode == VOIDmode)
3874 return 0;
3875
3876 rnbitsize = GET_MODE_BITSIZE (rnmode);
3877 rnbitpos = first_bit & ~ (rnbitsize - 1);
3878 rntype = (*lang_hooks.types.type_for_size) (rnbitsize, 1);
3879 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
3880
3881 if (BYTES_BIG_ENDIAN)
3882 {
3883 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
3884 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
3885 }
3886
3887 lr_mask = const_binop (LSHIFT_EXPR, convert (rntype, lr_mask),
3888 size_int (xlr_bitpos), 0);
3889 rr_mask = const_binop (LSHIFT_EXPR, convert (rntype, rr_mask),
3890 size_int (xrr_bitpos), 0);
3891
3892 /* Make a mask that corresponds to both fields being compared.
3893 Do this for both items being compared. If the operands are the
3894 same size and the bits being compared are in the same position
3895 then we can do this by masking both and comparing the masked
3896 results. */
3897 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3898 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
3899 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
3900 {
3901 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
3902 ll_unsignedp || rl_unsignedp);
3903 if (! all_ones_mask_p (ll_mask, lnbitsize))
3904 lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
3905
3906 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
3907 lr_unsignedp || rr_unsignedp);
3908 if (! all_ones_mask_p (lr_mask, rnbitsize))
3909 rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
3910
3911 return build (wanted_code, truth_type, lhs, rhs);
3912 }
3913
3914 /* There is still another way we can do something: If both pairs of
3915 fields being compared are adjacent, we may be able to make a wider
3916 field containing them both.
3917
3918 Note that we still must mask the lhs/rhs expressions. Furthermore,
3919 the mask must be shifted to account for the shift done by
3920 make_bit_field_ref. */
3921 if ((ll_bitsize + ll_bitpos == rl_bitpos
3922 && lr_bitsize + lr_bitpos == rr_bitpos)
3923 || (ll_bitpos == rl_bitpos + rl_bitsize
3924 && lr_bitpos == rr_bitpos + rr_bitsize))
3925 {
3926 tree type;
3927
3928 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
3929 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
3930 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
3931 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
3932
3933 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
3934 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
3935 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
3936 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
3937
3938 /* Convert to the smaller type before masking out unwanted bits. */
3939 type = lntype;
3940 if (lntype != rntype)
3941 {
3942 if (lnbitsize > rnbitsize)
3943 {
3944 lhs = convert (rntype, lhs);
3945 ll_mask = convert (rntype, ll_mask);
3946 type = rntype;
3947 }
3948 else if (lnbitsize < rnbitsize)
3949 {
3950 rhs = convert (lntype, rhs);
3951 lr_mask = convert (lntype, lr_mask);
3952 type = lntype;
3953 }
3954 }
3955
3956 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
3957 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
3958
3959 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
3960 rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
3961
3962 return build (wanted_code, truth_type, lhs, rhs);
3963 }
3964
3965 return 0;
3966 }
3967
3968 /* Handle the case of comparisons with constants. If there is something in
3969 common between the masks, those bits of the constants must be the same.
3970 If not, the condition is always false. Test for this to avoid generating
3971 incorrect code below. */
3972 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
3973 if (! integer_zerop (result)
3974 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
3975 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
3976 {
3977 if (wanted_code == NE_EXPR)
3978 {
3979 warning ("`or' of unmatched not-equal tests is always 1");
3980 return convert (truth_type, integer_one_node);
3981 }
3982 else
3983 {
3984 warning ("`and' of mutually exclusive equal-tests is always 0");
3985 return convert (truth_type, integer_zero_node);
3986 }
3987 }
3988
3989 /* Construct the expression we will return. First get the component
3990 reference we will make. Unless the mask is all ones the width of
3991 that field, perform the mask operation. Then compare with the
3992 merged constant. */
3993 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
3994 ll_unsignedp || rl_unsignedp);
3995
3996 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3997 if (! all_ones_mask_p (ll_mask, lnbitsize))
3998 result = build (BIT_AND_EXPR, lntype, result, ll_mask);
3999
4000 return build (wanted_code, truth_type, result,
4001 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4002 }
4003 \f
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant; e.g. fold MAX (X, 2) == 1 to constant false, or
   MIN (X, 9) > 5 to X > 5.  Both the constant inside the MIN/MAX and
   the constant compared against must be INTEGER_CSTs without overflow;
   otherwise T is returned unchanged.  */

static tree
optimize_minmax_comparison (tree t)
{
  tree type = TREE_TYPE (t);
  tree arg0 = TREE_OPERAND (t, 0);
  enum tree_code op_code;
  tree comp_const = TREE_OPERAND (t, 1);
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  /* Look through conversions that do not change the sign.  */
  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  /* Relation of the constant inside the MIN/MAX to the constant being
     compared against; these drive the case analysis below.  */
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (minmax_const))
    return t;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (TREE_CODE (t))
    {
    case NE_EXPR: case LT_EXPR: case LE_EXPR:
      /* X != C is !(X == C); X < C is !(X >= C); X <= C is !(X > C).  */
      return
	invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));

    case GE_EXPR:
      /* X >= C is (X == C) || (X > C).  */
      return
	fold (build (TRUTH_ORIF_EXPR, type,
		     optimize_minmax_comparison
		     (build (EQ_EXPR, type, arg0, comp_const)),
		     optimize_minmax_comparison
		     (build (GT_EXPR, type, arg0, comp_const))));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
	/* MAX (X, 0) == 0  ->  X <= 0  */
	return fold (build (LE_EXPR, type, inner, comp_const));

      else if (op_code == MAX_EXPR && consts_lt)
	/* MAX (X, 0) == 5  ->  X == 5  */
	return fold (build (EQ_EXPR, type, inner, comp_const));

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) == -1  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else if (consts_equal)
	/* MIN (X, 0) == 0  ->  X >= 0  */
	return fold (build (GE_EXPR, type, inner, comp_const));

      else if (consts_lt)
	/* MIN (X, 0) == 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) == -1  ->  X == -1  */
	return fold (build (EQ_EXPR, type, inner, comp_const));

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
	/* MAX (X, 0) > 0  ->  X > 0
	   MAX (X, 0) > 5  ->  X > 5  */
	return fold (build (GT_EXPR, type, inner, comp_const));

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) > -1  ->  true  */
	return omit_one_operand (type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
	/* MIN (X, 0) > 0  ->  false
	   MIN (X, 0) > 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) > -1  ->  X > -1  */
	return fold (build (GT_EXPR, type, inner, comp_const));

    default:
      return t;
    }
}
4099 \f
4100 /* T is an integer expression that is being multiplied, divided, or taken a
4101 modulus (CODE says which and what kind of divide or modulus) by a
4102 constant C. See if we can eliminate that operation by folding it with
4103 other operations already in T. WIDE_TYPE, if non-null, is a type that
4104 should be used for the computation if wider than our type.
4105
4106 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4107 (X * 2) + (Y * 4). We must, however, be assured that either the original
4108 expression would not overflow or that overflow is undefined for the type
4109 in the language in question.
4110
4111 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4112 the machine has a multiply-accumulate insn or that this is part of an
4113 addressing calculation.
4114
4115 If we return a non-null expression, it is an equivalent form of the
4116 original computation, but need not be in the original type. */
4117
4118 static tree
4119 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
4120 {
4121 /* To avoid exponential search depth, refuse to allow recursion past
4122 three levels. Beyond that (1) it's highly unlikely that we'll find
4123 something interesting and (2) we've probably processed it before
4124 when we built the inner expression. */
4125
4126 static int depth;
4127 tree ret;
4128
4129 if (depth > 3)
4130 return NULL;
4131
4132 depth++;
4133 ret = extract_muldiv_1 (t, c, code, wide_type);
4134 depth--;
4135
4136 return ret;
4137 }
4138
/* Worker for extract_muldiv.  Arguments and return value are as for
   extract_muldiv; the recursion-depth limit has already been enforced
   by the caller.  */

static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  /* Compute in WIDE_TYPE when it is strictly wider than T's type,
     otherwise in T's own type.  */
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
				   > GET_MODE_SIZE (TYPE_MODE (type)))
		? wide_type : type);
  tree t1, t2;
  /* Nonzero when T's top-level operation is the same as the one being
     distributed (e.g. a MULT_EXPR being divided out of a MULT_EXPR).  */
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == '1')
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == '2')
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
	 or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
	return const_binop (code, convert (ctype, t), convert (ctype, c), 0);
      break;

    case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
      /* If op0 is an expression ... */
      if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
	   || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
	   || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
	   || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
	  /* ... and is unsigned, and its type is smaller than ctype,
	     then we cannot pass through as widening.  */
	  && ((TREE_UNSIGNED (TREE_TYPE (op0))
	       && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
		     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
	       && (GET_MODE_SIZE (TYPE_MODE (ctype))
		   > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
	      /* ... or its type is larger than ctype,
		 then we cannot pass through this truncation.  */
	      || (GET_MODE_SIZE (TYPE_MODE (ctype))
		  < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
	      /* ... or signedness changes for division or modulus,
		 then we cannot pass through this conversion.  */
	      || (code != MULT_EXPR
		  && (TREE_UNSIGNED (ctype)
		      != TREE_UNSIGNED (TREE_TYPE (op0))))))
	break;

      /* Pass the constant down and see if we can make a simplification.  If
	 we can, replace this expression with the inner simplification for
	 possible later conversion to our or some other type.  */
      if ((t2 = convert (TREE_TYPE (op0), c)) != 0
	  && TREE_CODE (t2) == INTEGER_CST
	  && ! TREE_CONSTANT_OVERFLOW (t2)
	  && (0 != (t1 = extract_muldiv (op0, t2, code,
					 code == MULT_EXPR
					 ? ctype : NULL_TREE))))
	return t1;
      break;

    case NEGATE_EXPR: case ABS_EXPR:
      /* -X * C and |X| * C distribute into the operand.  */
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
	return fold (build1 (tcode, ctype, convert (ctype, t1)));
      break;

    case MIN_EXPR: case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
	 this optimization as that changes the result.  */
      if (TREE_UNSIGNED (ctype) != TREE_UNSIGNED (type))
	break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
	  && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
	{
	  /* Multiplying or dividing by a negative constant swaps the
	     sense of MIN and MAX.  */
	  if (tree_int_cst_sgn (c) < 0)
	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);

	  return fold (build (tcode, ctype, convert (ctype, t1),
			      convert (ctype, t2)));
	}
      break;

    case WITH_RECORD_EXPR:
      /* Simplify the wrapped value; keep the record context unchanged.  */
      if ((t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code, wide_type)) != 0)
	return build (WITH_RECORD_EXPR, TREE_TYPE (t1), t1,
		      TREE_OPERAND (t, 1));
      break;

    case LSHIFT_EXPR: case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
	 or floor division, by a power of two, so we can treat it that
	 way unless the multiplier or divisor overflows.  */
      if (TREE_CODE (op1) == INTEGER_CST
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
	  && TREE_INT_CST_HIGH (op1) == 0
	  && 0 != (t1 = convert (ctype,
				 const_binop (LSHIFT_EXPR, size_one_node,
					      op1, 0)))
	  && ! TREE_OVERFLOW (t1))
	return extract_muldiv (build (tcode == LSHIFT_EXPR
				      ? MULT_EXPR : FLOOR_DIV_EXPR,
				      ctype, convert (ctype, op0), t1),
			       c, code, wide_type);
      break;

    case PLUS_EXPR: case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
      t1 = extract_muldiv (op0, c, code, wide_type);
      t2 = extract_muldiv (op1, c, code, wide_type);
      if (t1 != 0 && t2 != 0
	  && (code == MULT_EXPR
	      /* If not multiplication, we can only do this if both operands
		 are divisible by c.  */
	      || (multiple_of_p (ctype, op0, c)
		  && multiple_of_p (ctype, op1, c))))
	return fold (build (tcode, ctype, convert (ctype, t1),
			    convert (ctype, t2)));

      /* If this was a subtraction, negate OP1 and set it to be an addition.
	 This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
	tcode = PLUS_EXPR, op1 = negate_expr (op1);

      if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If either OP1 or C are negative, this optimization is not safe for
	 some of the division and remainder types while for others we need
	 to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code != MULT_EXPR
		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
	    break;
	}

      /* If it's a multiply or a division/modulus operation of a multiple
	 of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	{
	  op1 = const_binop (code, convert (ctype, op1), convert (ctype, c), 0);
	  if (op1 == 0 || TREE_OVERFLOW (op1))
	    break;
	}
      else
	break;

      /* If we have an unsigned type that is not a sizetype, we cannot widen
	 the operation since it will change the result if the original
	 computation overflowed.  */
      if (TREE_UNSIGNED (ctype)
	  && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
	  && ctype != type)
	break;

      /* If we were able to eliminate our operation from the first side,
	 apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
	return fold (build (tcode, ctype, convert (ctype, t1), op1));

      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow.  */
      if (code == MULT_EXPR)
	return fold (build (tcode, ctype, fold (build (code, ctype,
						       convert (ctype, op0),
						       convert (ctype, c))),
			    op1));

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	return omit_one_operand (type, integer_zero_node, op0);

      /* ... fall through ... */

    case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
      if (same_p
	  && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
	return fold (build (tcode, ctype, convert (ctype, t1),
			    convert (ctype, op1)));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
	       && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
	return fold (build (tcode, ctype, convert (ctype, op0),
			    convert (ctype, t1)));
      else if (TREE_CODE (op1) != INTEGER_CST)
	return 0;

      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
      if (tcode == code
	  && 0 != (t1 = const_binop (MULT_EXPR, convert (ctype, op1),
				     convert (ctype, c), 0))
	  && ! TREE_OVERFLOW (t1))
	return fold (build (tcode, ctype, convert (ctype, op0), t1));

      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with either an
	 operation or CODE or TCODE.

	 If we have an unsigned type that is not a sizetype, we cannot do
	 this since it will change the result if the original computation
	 overflowed.  */
      if ((! TREE_UNSIGNED (ctype)
	   || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
	  && ! flag_wrapv
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
	      || (tcode == MULT_EXPR
		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
	{
	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	    return fold (build (tcode, ctype, convert (ctype, op0),
				convert (ctype,
					 const_binop (TRUNC_DIV_EXPR,
						      op1, c, 0))));
	  else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
	    return fold (build (code, ctype, convert (ctype, op0),
				convert (ctype,
					 const_binop (TRUNC_DIV_EXPR,
						      c, op1, 0))));
	}
      break;

    default:
      break;
    }

  return 0;
}
4401 \f
4402 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4403 S, a SAVE_EXPR, return the expression actually being evaluated. Note
4404 that we may sometimes modify the tree. */
4405
4406 static tree
4407 strip_compound_expr (tree t, tree s)
4408 {
4409 enum tree_code code = TREE_CODE (t);
4410
4411 /* See if this is the COMPOUND_EXPR we want to eliminate. */
4412 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4413 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4414 return TREE_OPERAND (t, 1);
4415
4416 /* See if this is a COND_EXPR or a simple arithmetic operator. We
4417 don't bother handling any other types. */
4418 else if (code == COND_EXPR)
4419 {
4420 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4421 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4422 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
4423 }
4424 else if (TREE_CODE_CLASS (code) == '1')
4425 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4426 else if (TREE_CODE_CLASS (code) == '<'
4427 || TREE_CODE_CLASS (code) == '2')
4428 {
4429 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4430 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4431 }
4432
4433 return t;
4434 }
4435 \f
4436 /* Return a node which has the indicated constant VALUE (either 0 or
4437 1), and is of the indicated TYPE. */
4438
4439 static tree
4440 constant_boolean_node (int value, tree type)
4441 {
4442 if (type == integer_type_node)
4443 return value ? integer_one_node : integer_zero_node;
4444 else if (TREE_CODE (type) == BOOLEAN_TYPE)
4445 return (*lang_hooks.truthvalue_conversion) (value ? integer_one_node :
4446 integer_zero_node);
4447 else
4448 {
4449 tree t = build_int_2 (value, 0);
4450
4451 TREE_TYPE (t) = type;
4452 return t;
4453 }
4454 }
4455
4456 /* Utility function for the following routine, to see how complex a nesting of
4457 COND_EXPRs can be. EXPR is the expression and LIMIT is a count beyond which
4458 we don't care (to avoid spending too much time on complex expressions.). */
4459
4460 static int
4461 count_cond (tree expr, int lim)
4462 {
4463 int ctrue, cfalse;
4464
4465 if (TREE_CODE (expr) != COND_EXPR)
4466 return 0;
4467 else if (lim <= 0)
4468 return 0;
4469
4470 ctrue = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4471 cfalse = count_cond (TREE_OPERAND (expr, 2), lim - 1 - ctrue);
4472 return MIN (lim, 1 + ctrue + cfalse);
4473 }
4474
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  */

static tree
fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
				     tree cond, tree arg, int cond_first_p)
{
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;
  /* In the end, we'll produce a COND_EXPR.  Both arms of the
     conditional expression will be binary operations.  The left-hand
     side of the expression to be executed if the condition is true
     will be pointed to by TRUE_LHS.  Similarly, the right-hand side
     of the expression to be executed if the condition is true will be
     pointed to by TRUE_RHS.  FALSE_LHS and FALSE_RHS are analogous --
     but apply to the expression to be executed if the conditional is
     false.  */
  tree *true_lhs;
  tree *true_rhs;
  tree *false_lhs;
  tree *false_rhs;
  /* These are the codes to use for the left-hand side and right-hand
     side of the COND_EXPR.  Normally, they are the same as CODE.  */
  enum tree_code lhs_code = code;
  enum tree_code rhs_code = code;
  /* And these are the types of the expressions.  */
  tree lhs_type = type;
  tree rhs_type = type;
  /* Nonzero when ARG must be wrapped in a SAVE_EXPR and re-evaluated
     via a COMPOUND_EXPR at the end.  */
  int save = 0;

  /* Aim the TRUE_ and FALSE_ pointers at ARG and the arm values so the
     code below need not care which side of CODE the conditional was on.  */
  if (cond_first_p)
    {
      true_rhs = false_rhs = &arg;
      true_lhs = &true_value;
      false_lhs = &false_value;
    }
  else
    {
      true_lhs = false_lhs = &arg;
      true_rhs = &true_value;
      false_rhs = &false_value;
    }

  if (TREE_CODE (cond) == COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an expression, then it does not make
	 sense to try to perform a logical or arithmetic operation
	 involving it.  Instead of building `a + throw 3' for example,
	 we simply build `a, throw 3'.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	{
	  if (! cond_first_p)
	    {
	      lhs_code = COMPOUND_EXPR;
	      lhs_type = void_type_node;
	    }
	  else
	    lhs = true_value;
	}
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	{
	  if (! cond_first_p)
	    {
	      rhs_code = COMPOUND_EXPR;
	      rhs_type = void_type_node;
	    }
	  else
	    rhs = false_value;
	}
    }
  else
    {
      /* COND is a comparison such as `x < y'; treat it as a COND_EXPR
	 selecting between 1 and 0 in its own type.  */
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = convert (testtype, integer_one_node);
      false_value = convert (testtype, integer_zero_node);
    }

  /* If ARG is complex we want to make sure we only evaluate it once.  Though
     this is only required if it is volatile, it might be more efficient even
     if it is not.  However, if we succeed in folding one part to a constant,
     we do not need to make this SAVE_EXPR.  Since we do this optimization
     primarily to see if we do end up with constant and this SAVE_EXPR
     interferes with later optimizations, suppressing it when we can is
     important.

     If we are not in a function, we can't make a SAVE_EXPR, so don't try to
     do so.  Don't try to see if the result is a constant if an arm is a
     COND_EXPR since we get exponential behavior in that case.  */

  if (saved_expr_p (arg))
    save = 1;
  else if (lhs == 0 && rhs == 0
	   && !TREE_CONSTANT (arg)
	   && (*lang_hooks.decls.global_bindings_p) () == 0
	   && ((TREE_CODE (arg) != VAR_DECL && TREE_CODE (arg) != PARM_DECL)
	       || TREE_SIDE_EFFECTS (arg)))
    {
      /* Tentatively fold each arm; only arms that are not COND_EXPRs
	 are tried, to avoid exponential blowup.  */
      if (TREE_CODE (true_value) != COND_EXPR)
	lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));

      if (TREE_CODE (false_value) != COND_EXPR)
	rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));

      /* Neither arm folded to a constant, so ARG really will be
	 evaluated twice; protect it with a SAVE_EXPR and start over.  */
      if ((lhs == 0 || ! TREE_CONSTANT (lhs))
	  && (rhs == 0 || !TREE_CONSTANT (rhs)))
	{
	  arg = save_expr (arg);
	  lhs = rhs = 0;
	  save = 1;
	}
    }

  if (lhs == 0)
    lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
  if (rhs == 0)
    rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));

  test = fold (build (COND_EXPR, type, test, lhs, rhs));

  if (save)
    return build (COMPOUND_EXPR, type,
		  convert (void_type_node, arg),
		  strip_compound_expr (test, arg));
  else
    return convert (type, test);
}
4611
4612 \f
4613 /* Subroutine of fold() that checks for the addition of +/- 0.0.
4614
4615 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
4616 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
4617 ADDEND is the same as X.
4618
4619 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
4620 and finite. The problematic cases are when X is zero, and its mode
4621 has signed zeros. In the case of rounding towards -infinity,
4622 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
4623 modes, X + 0 is not the same as X because -0 + 0 is 0. */
4624
4625 static bool
4626 fold_real_zero_addition_p (tree type, tree addend, int negate)
4627 {
4628 if (!real_zerop (addend))
4629 return false;
4630
4631 /* Don't allow the fold with -fsignaling-nans. */
4632 if (HONOR_SNANS (TYPE_MODE (type)))
4633 return false;
4634
4635 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
4636 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
4637 return true;
4638
4639 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
4640 if (TREE_CODE (addend) == REAL_CST
4641 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
4642 negate = !negate;
4643
4644 /* The mode has signed zeros, and we have to honor their sign.
4645 In this situation, there is only one case we can return true for.
4646 X - 0 is the same as X unless rounding towards -infinity is
4647 supported. */
4648 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
4649 }
4650
/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
		     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  /* Only the sqrt family is handled; anything else falls through to
     NULL_TREE at the end.  */
  if (fcode == BUILT_IN_SQRT
      || fcode == BUILT_IN_SQRTF
      || fcode == BUILT_IN_SQRTL)
    {
      /* ARG is the argument of the sqrt call.  */
      tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
	{
	  /* sqrt(x) < y is always false, if y is negative.  */
	  if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
	    return omit_one_operand (type,
				     convert (type, integer_zero_node),
				     arg);

	  /* sqrt(x) > y is always true, if y is negative and we
	     don't care about NaNs, i.e. negative values of x.  */
	  if (code == NE_EXPR || !HONOR_NANS (mode))
	    return omit_one_operand (type,
				     convert (type, integer_one_node),
				     arg);

	  /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
	  return fold (build (GE_EXPR, type, arg,
			      build_real (TREE_TYPE (arg), dconst0)));
	}
      else if (code == GT_EXPR || code == GE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  /* Square C (rounded to the argument's mode) so X can be
	     compared directly against C*C.  */
	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) > y is x == +Inf, when y is very large.  */
	      if (HONOR_INFINITIES (mode))
		return fold (build (EQ_EXPR, type, arg,
				    build_real (TREE_TYPE (arg), c2)));

	      /* sqrt(x) > y is always false, when y is very large
		 and we don't care about infinities.  */
	      return omit_one_operand (type,
				       convert (type, integer_zero_node),
				       arg);
	    }

	  /* sqrt(x) > c is the same as x > c*c.  */
	  return fold (build (code, type, arg,
			      build_real (TREE_TYPE (arg), c2)));
	}
      else if (code == LT_EXPR || code == LE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  /* As above, compare X against C*C in the argument's mode.  */
	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) < y is always true, when y is a very large
		 value and we don't care about NaNs or Infinities.  */
	      if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
		return omit_one_operand (type,
					 convert (type, integer_one_node),
					 arg);

	      /* sqrt(x) < y is x != +Inf when y is very large and we
		 don't care about NaNs.  */
	      if (! HONOR_NANS (mode))
		return fold (build (NE_EXPR, type, arg,
				    build_real (TREE_TYPE (arg), c2)));

	      /* sqrt(x) < y is x >= 0 when y is very large and we
		 don't care about Infinities.  */
	      if (! HONOR_INFINITIES (mode))
		return fold (build (GE_EXPR, type, arg,
				    build_real (TREE_TYPE (arg), dconst0)));

	      /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
	      if ((*lang_hooks.decls.global_bindings_p) () != 0
		  || CONTAINS_PLACEHOLDER_P (arg))
		return NULL_TREE;

	      /* ARG is used twice below, so evaluate it only once.  */
	      arg = save_expr (arg);
	      return fold (build (TRUTH_ANDIF_EXPR, type,
				  fold (build (GE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   dconst0))),
				  fold (build (NE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   c2)))));
	    }

	  /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
	  if (! HONOR_NANS (mode))
	    return fold (build (code, type, arg,
				build_real (TREE_TYPE (arg), c2)));

	  /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
	  if ((*lang_hooks.decls.global_bindings_p) () == 0
	      && ! CONTAINS_PLACEHOLDER_P (arg))
	    {
	      arg = save_expr (arg);
	      return fold (build (TRUTH_ANDIF_EXPR, type,
				  fold (build (GE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   dconst0))),
				  fold (build (code, type, arg,
					       build_real (TREE_TYPE (arg),
							   c2)))));
	    }
	}
    }

  return NULL_TREE;
}
4786
/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
	return NULL_TREE;
      return omit_one_operand (type,
			       convert (type, integer_zero_node),
			       arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
	return omit_one_operand (type,
				 convert (type, integer_one_node),
				 arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
      if ((*lang_hooks.decls.global_bindings_p) () == 0
	  && ! CONTAINS_PLACEHOLDER_P (arg0))
	{
	  /* ARG0 appears on both sides, so evaluate it only once.  */
	  arg0 = save_expr (arg0);
	  return fold (build (EQ_EXPR, type, arg0, arg0));
	}
      break;

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold (build (neg ? LT_EXPR : GT_EXPR, type,
			  arg0, build_real (TREE_TYPE (arg0), max)));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold (build (neg ? GE_EXPR : LE_EXPR, type,
			  arg0, build_real (TREE_TYPE (arg0), max)));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
	return fold (build (neg ? GE_EXPR : LE_EXPR, type,
			    arg0, build_real (TREE_TYPE (arg0), max)));

      /* With NaNs, x <= DBL_MAX is not the negation of x > DBL_MAX,
	 so build the logical negation explicitly.  */
      temp = fold (build (neg ? LT_EXPR : GT_EXPR, type,
			  arg0, build_real (TREE_TYPE (arg0), max)));
      return fold (build1 (TRUTH_NOT_EXPR, type, temp));

    default:
      break;
    }

  return NULL_TREE;
}
4867
4868 /* If CODE with arguments ARG0 and ARG1 represents a single bit
4869 equality/inequality test, then return a simplified form of
4870 the test using shifts and logical operations. Otherwise return
4871 NULL. TYPE is the desired result type. */
4872
4873 tree
4874 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
4875 tree result_type)
4876 {
4877 /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
4878 operand 0. */
4879 if (code == TRUTH_NOT_EXPR)
4880 {
4881 code = TREE_CODE (arg0);
4882 if (code != NE_EXPR && code != EQ_EXPR)
4883 return NULL_TREE;
4884
4885 /* Extract the arguments of the EQ/NE. */
4886 arg1 = TREE_OPERAND (arg0, 1);
4887 arg0 = TREE_OPERAND (arg0, 0);
4888
4889 /* This requires us to invert the code. */
4890 code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
4891 }
4892
4893 /* If this is testing a single bit, we can optimize the test. */
4894 if ((code == NE_EXPR || code == EQ_EXPR)
4895 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
4896 && integer_pow2p (TREE_OPERAND (arg0, 1)))
4897 {
4898 tree inner = TREE_OPERAND (arg0, 0);
4899 tree type = TREE_TYPE (arg0);
4900 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
4901 enum machine_mode operand_mode = TYPE_MODE (type);
4902 int ops_unsigned;
4903 tree signed_type, unsigned_type;
4904 tree arg00;
4905
4906 /* If we have (A & C) != 0 where C is the sign bit of A, convert
4907 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
4908 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
4909 if (arg00 != NULL_TREE)
4910 {
4911 tree stype = (*lang_hooks.types.signed_type) (TREE_TYPE (arg00));
4912 return fold (build (code == EQ_EXPR ? GE_EXPR : LT_EXPR, result_type,
4913 convert (stype, arg00),
4914 convert (stype, integer_zero_node)));
4915 }
4916
4917 /* At this point, we know that arg0 is not testing the sign bit. */
4918 if (TYPE_PRECISION (type) - 1 == bitnum)
4919 abort ();
4920
4921 /* Otherwise we have (A & C) != 0 where C is a single bit,
4922 convert that into ((A >> C2) & 1). Where C2 = log2(C).
4923 Similarly for (A & C) == 0. */
4924
4925 /* If INNER is a right shift of a constant and it plus BITNUM does
4926 not overflow, adjust BITNUM and INNER. */
4927 if (TREE_CODE (inner) == RSHIFT_EXPR
4928 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
4929 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
4930 && bitnum < TYPE_PRECISION (type)
4931 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
4932 bitnum - TYPE_PRECISION (type)))
4933 {
4934 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
4935 inner = TREE_OPERAND (inner, 0);
4936 }
4937
4938 /* If we are going to be able to omit the AND below, we must do our
4939 operations as unsigned. If we must use the AND, we have a choice.
4940 Normally unsigned is faster, but for some machines signed is. */
4941 #ifdef LOAD_EXTEND_OP
4942 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
4943 #else
4944 ops_unsigned = 1;
4945 #endif
4946
4947 signed_type = (*lang_hooks.types.type_for_mode) (operand_mode, 0);
4948 unsigned_type = (*lang_hooks.types.type_for_mode) (operand_mode, 1);
4949
4950 if (bitnum != 0)
4951 inner = build (RSHIFT_EXPR, ops_unsigned ? unsigned_type : signed_type,
4952 inner, size_int (bitnum));
4953
4954 if (code == EQ_EXPR)
4955 inner = build (BIT_XOR_EXPR, ops_unsigned ? unsigned_type : signed_type,
4956 inner, integer_one_node);
4957
4958 /* Put the AND last so it can combine with more things. */
4959 inner = build (BIT_AND_EXPR, ops_unsigned ? unsigned_type : signed_type,
4960 inner, integer_one_node);
4961
4962 /* Make sure to return the proper type. */
4963 if (TREE_TYPE (inner) != result_type)
4964 inner = convert (result_type, inner);
4965
4966 return inner;
4967 }
4968 return NULL_TREE;
4969 }
4970
4971 /* Perform constant folding and related simplification of EXPR.
4972 The related simplifications include x*1 => x, x*0 => 0, etc.,
4973 and application of the associative law.
4974 NOP_EXPR conversions may be removed freely (as long as we
4975 are careful not to change the C type of the overall expression)
4976 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
4977 but we can constant-fold them if they have constant operands. */
4978
4979 #ifdef ENABLE_FOLD_CHECKING
4980 # define fold(x) fold_1 (x)
4981 static tree fold_1 (tree);
4982 static
4983 #endif
4984 tree
4985 fold (tree expr)
4986 {
4987 tree t = expr, orig_t;
4988 tree t1 = NULL_TREE;
4989 tree tem;
4990 tree type = TREE_TYPE (expr);
4991 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4992 enum tree_code code = TREE_CODE (t);
4993 int kind = TREE_CODE_CLASS (code);
4994 int invert;
4995 /* WINS will be nonzero when the switch is done
4996 if all operands are constant. */
4997 int wins = 1;
4998
4999 /* Don't try to process an RTL_EXPR since its operands aren't trees.
5000 Likewise for a SAVE_EXPR that's already been evaluated. */
5001 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
5002 return t;
5003
5004 /* Return right away if a constant. */
5005 if (kind == 'c')
5006 return t;
5007
5008 #ifdef MAX_INTEGER_COMPUTATION_MODE
5009 check_max_integer_computation_mode (expr);
5010 #endif
5011 orig_t = t;
5012
5013 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
5014 {
5015 tree subop;
5016
5017 /* Special case for conversion ops that can have fixed point args. */
5018 arg0 = TREE_OPERAND (t, 0);
5019
5020 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
5021 if (arg0 != 0)
5022 STRIP_SIGN_NOPS (arg0);
5023
5024 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
5025 subop = TREE_REALPART (arg0);
5026 else
5027 subop = arg0;
5028
5029 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
5030 && TREE_CODE (subop) != REAL_CST
5031 )
5032 /* Note that TREE_CONSTANT isn't enough:
5033 static var addresses are constant but we can't
5034 do arithmetic on them. */
5035 wins = 0;
5036 }
5037 else if (IS_EXPR_CODE_CLASS (kind) || kind == 'r')
5038 {
5039 int len = first_rtl_op (code);
5040 int i;
5041 for (i = 0; i < len; i++)
5042 {
5043 tree op = TREE_OPERAND (t, i);
5044 tree subop;
5045
5046 if (op == 0)
5047 continue; /* Valid for CALL_EXPR, at least. */
5048
5049 if (kind == '<' || code == RSHIFT_EXPR)
5050 {
5051 /* Signedness matters here. Perhaps we can refine this
5052 later. */
5053 STRIP_SIGN_NOPS (op);
5054 }
5055 else
5056 /* Strip any conversions that don't change the mode. */
5057 STRIP_NOPS (op);
5058
5059 if (TREE_CODE (op) == COMPLEX_CST)
5060 subop = TREE_REALPART (op);
5061 else
5062 subop = op;
5063
5064 if (TREE_CODE (subop) != INTEGER_CST
5065 && TREE_CODE (subop) != REAL_CST)
5066 /* Note that TREE_CONSTANT isn't enough:
5067 static var addresses are constant but we can't
5068 do arithmetic on them. */
5069 wins = 0;
5070
5071 if (i == 0)
5072 arg0 = op;
5073 else if (i == 1)
5074 arg1 = op;
5075 }
5076 }
5077
5078 /* If this is a commutative operation, and ARG0 is a constant, move it
5079 to ARG1 to reduce the number of tests below. */
5080 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
5081 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
5082 || code == BIT_AND_EXPR)
5083 && ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) != INTEGER_CST)
5084 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) != REAL_CST)))
5085 {
5086 tem = arg0; arg0 = arg1; arg1 = tem;
5087
5088 if (t == orig_t)
5089 t = copy_node (t);
5090 TREE_OPERAND (t, 0) = arg0;
5091 TREE_OPERAND (t, 1) = arg1;
5092 }
5093
5094 /* Now WINS is set as described above,
5095 ARG0 is the first operand of EXPR,
5096 and ARG1 is the second operand (if it has more than one operand).
5097
5098 First check for cases where an arithmetic operation is applied to a
5099 compound, conditional, or comparison operation. Push the arithmetic
5100 operation inside the compound or conditional to see if any folding
5101 can then be done. Convert comparison to conditional for this purpose.
5102      This also optimizes non-constant cases that used to be done in
5103 expand_expr.
5104
5105 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
5106 one of the operands is a comparison and the other is a comparison, a
5107 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
5108 code below would make the expression more complex. Change it to a
5109 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
5110 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
5111
5112 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
5113 || code == EQ_EXPR || code == NE_EXPR)
5114 && ((truth_value_p (TREE_CODE (arg0))
5115 && (truth_value_p (TREE_CODE (arg1))
5116 || (TREE_CODE (arg1) == BIT_AND_EXPR
5117 && integer_onep (TREE_OPERAND (arg1, 1)))))
5118 || (truth_value_p (TREE_CODE (arg1))
5119 && (truth_value_p (TREE_CODE (arg0))
5120 || (TREE_CODE (arg0) == BIT_AND_EXPR
5121 && integer_onep (TREE_OPERAND (arg0, 1)))))))
5122 {
5123 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
5124 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
5125 : TRUTH_XOR_EXPR,
5126 type, arg0, arg1));
5127
5128 if (code == EQ_EXPR)
5129 t = invert_truthvalue (t);
5130
5131 return t;
5132 }
5133
5134 if (TREE_CODE_CLASS (code) == '1')
5135 {
5136 if (TREE_CODE (arg0) == COMPOUND_EXPR)
5137 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5138 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
5139 else if (TREE_CODE (arg0) == COND_EXPR)
5140 {
5141 tree arg01 = TREE_OPERAND (arg0, 1);
5142 tree arg02 = TREE_OPERAND (arg0, 2);
5143 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
5144 arg01 = fold (build1 (code, type, arg01));
5145 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
5146 arg02 = fold (build1 (code, type, arg02));
5147 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
5148 arg01, arg02));
5149
5150 /* If this was a conversion, and all we did was to move into
5151 inside the COND_EXPR, bring it back out. But leave it if
5152 it is a conversion from integer to integer and the
5153 result precision is no wider than a word since such a
5154 conversion is cheap and may be optimized away by combine,
5155 while it couldn't if it were outside the COND_EXPR. Then return
5156 so we don't get into an infinite recursion loop taking the
5157 conversion out and then back in. */
5158
5159 if ((code == NOP_EXPR || code == CONVERT_EXPR
5160 || code == NON_LVALUE_EXPR)
5161 && TREE_CODE (t) == COND_EXPR
5162 && TREE_CODE (TREE_OPERAND (t, 1)) == code
5163 && TREE_CODE (TREE_OPERAND (t, 2)) == code
5164 && ! VOID_TYPE_P (TREE_OPERAND (t, 1))
5165 && ! VOID_TYPE_P (TREE_OPERAND (t, 2))
5166 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
5167 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
5168 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
5169 && (INTEGRAL_TYPE_P
5170 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))))
5171 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
5172 t = build1 (code, type,
5173 build (COND_EXPR,
5174 TREE_TYPE (TREE_OPERAND
5175 (TREE_OPERAND (t, 1), 0)),
5176 TREE_OPERAND (t, 0),
5177 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
5178 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
5179 return t;
5180 }
5181 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
5182 return fold (build (COND_EXPR, type, arg0,
5183 fold (build1 (code, type, integer_one_node)),
5184 fold (build1 (code, type, integer_zero_node))));
5185 }
5186 else if (TREE_CODE_CLASS (code) == '<'
5187 && TREE_CODE (arg0) == COMPOUND_EXPR)
5188 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5189 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5190 else if (TREE_CODE_CLASS (code) == '<'
5191 && TREE_CODE (arg1) == COMPOUND_EXPR)
5192 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5193 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
5194 else if (TREE_CODE_CLASS (code) == '2'
5195 || TREE_CODE_CLASS (code) == '<')
5196 {
5197 if (TREE_CODE (arg1) == COMPOUND_EXPR
5198 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg1, 0))
5199 && ! TREE_SIDE_EFFECTS (arg0))
5200 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5201 fold (build (code, type,
5202 arg0, TREE_OPERAND (arg1, 1))));
5203 else if ((TREE_CODE (arg1) == COND_EXPR
5204 || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
5205 && TREE_CODE_CLASS (code) != '<'))
5206 && (TREE_CODE (arg0) != COND_EXPR
5207 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5208 && (! TREE_SIDE_EFFECTS (arg0)
5209 || ((*lang_hooks.decls.global_bindings_p) () == 0
5210 && ! CONTAINS_PLACEHOLDER_P (arg0))))
5211 return
5212 fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
5213 /*cond_first_p=*/0);
5214 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
5215 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5216 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5217 else if ((TREE_CODE (arg0) == COND_EXPR
5218 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
5219 && TREE_CODE_CLASS (code) != '<'))
5220 && (TREE_CODE (arg1) != COND_EXPR
5221 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5222 && (! TREE_SIDE_EFFECTS (arg1)
5223 || ((*lang_hooks.decls.global_bindings_p) () == 0
5224 && ! CONTAINS_PLACEHOLDER_P (arg1))))
5225 return
5226 fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
5227 /*cond_first_p=*/1);
5228 }
5229
5230 switch (code)
5231 {
5232 case INTEGER_CST:
5233 case REAL_CST:
5234 case VECTOR_CST:
5235 case STRING_CST:
5236 case COMPLEX_CST:
5237 case CONSTRUCTOR:
5238 return t;
5239
5240 case CONST_DECL:
5241 return fold (DECL_INITIAL (t));
5242
5243 case NOP_EXPR:
5244 case FLOAT_EXPR:
5245 case CONVERT_EXPR:
5246 case FIX_TRUNC_EXPR:
5247 /* Other kinds of FIX are not handled properly by fold_convert. */
5248
5249 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
5250 return TREE_OPERAND (t, 0);
5251
5252 /* Handle cases of two conversions in a row. */
5253 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
5254 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
5255 {
5256 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5257 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
5258 tree final_type = TREE_TYPE (t);
5259 int inside_int = INTEGRAL_TYPE_P (inside_type);
5260 int inside_ptr = POINTER_TYPE_P (inside_type);
5261 int inside_float = FLOAT_TYPE_P (inside_type);
5262 unsigned int inside_prec = TYPE_PRECISION (inside_type);
5263 int inside_unsignedp = TREE_UNSIGNED (inside_type);
5264 int inter_int = INTEGRAL_TYPE_P (inter_type);
5265 int inter_ptr = POINTER_TYPE_P (inter_type);
5266 int inter_float = FLOAT_TYPE_P (inter_type);
5267 unsigned int inter_prec = TYPE_PRECISION (inter_type);
5268 int inter_unsignedp = TREE_UNSIGNED (inter_type);
5269 int final_int = INTEGRAL_TYPE_P (final_type);
5270 int final_ptr = POINTER_TYPE_P (final_type);
5271 int final_float = FLOAT_TYPE_P (final_type);
5272 unsigned int final_prec = TYPE_PRECISION (final_type);
5273 int final_unsignedp = TREE_UNSIGNED (final_type);
5274
5275 /* In addition to the cases of two conversions in a row
5276 handled below, if we are converting something to its own
5277 type via an object of identical or wider precision, neither
5278 conversion is needed. */
5279 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (final_type)
5280 && ((inter_int && final_int) || (inter_float && final_float))
5281 && inter_prec >= final_prec)
5282 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5283
5284 /* Likewise, if the intermediate and final types are either both
5285 float or both integer, we don't need the middle conversion if
5286 it is wider than the final type and doesn't change the signedness
5287 (for integers). Avoid this if the final type is a pointer
5288 since then we sometimes need the inner conversion. Likewise if
5289 the outer has a precision not equal to the size of its mode. */
5290 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
5291 || (inter_float && inside_float))
5292 && inter_prec >= inside_prec
5293 && (inter_float || inter_unsignedp == inside_unsignedp)
5294 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5295 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5296 && ! final_ptr)
5297 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5298
5299 /* If we have a sign-extension of a zero-extended value, we can
5300 replace that by a single zero-extension. */
5301 if (inside_int && inter_int && final_int
5302 && inside_prec < inter_prec && inter_prec < final_prec
5303 && inside_unsignedp && !inter_unsignedp)
5304 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5305
5306 /* Two conversions in a row are not needed unless:
5307 - some conversion is floating-point (overstrict for now), or
5308 - the intermediate type is narrower than both initial and
5309 final, or
5310 - the intermediate type and innermost type differ in signedness,
5311 and the outermost type is wider than the intermediate, or
5312 - the initial type is a pointer type and the precisions of the
5313 intermediate and final types differ, or
5314 - the final type is a pointer type and the precisions of the
5315 initial and intermediate types differ. */
5316 if (! inside_float && ! inter_float && ! final_float
5317 && (inter_prec > inside_prec || inter_prec > final_prec)
5318 && ! (inside_int && inter_int
5319 && inter_unsignedp != inside_unsignedp
5320 && inter_prec < final_prec)
5321 && ((inter_unsignedp && inter_prec > inside_prec)
5322 == (final_unsignedp && final_prec > inter_prec))
5323 && ! (inside_ptr && inter_prec != final_prec)
5324 && ! (final_ptr && inside_prec != inter_prec)
5325 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5326 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5327 && ! final_ptr)
5328 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5329 }
5330
5331 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
5332 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
5333 /* Detect assigning a bitfield. */
5334 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
5335 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
5336 {
5337 /* Don't leave an assignment inside a conversion
5338 unless assigning a bitfield. */
5339 tree prev = TREE_OPERAND (t, 0);
5340 if (t == orig_t)
5341 t = copy_node (t);
5342 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
5343 /* First do the assignment, then return converted constant. */
5344 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
5345 TREE_USED (t) = 1;
5346 return t;
5347 }
5348
5349 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
5350 constants (if x has signed type, the sign bit cannot be set
5351 in c). This folds extension into the BIT_AND_EXPR. */
5352 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
5353 && TREE_CODE (TREE_TYPE (t)) != BOOLEAN_TYPE
5354 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
5355 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
5356 {
5357 tree and = TREE_OPERAND (t, 0);
5358 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
5359 int change = 0;
5360
5361 if (TREE_UNSIGNED (TREE_TYPE (and))
5362 || (TYPE_PRECISION (TREE_TYPE (t))
5363 <= TYPE_PRECISION (TREE_TYPE (and))))
5364 change = 1;
5365 else if (TYPE_PRECISION (TREE_TYPE (and1))
5366 <= HOST_BITS_PER_WIDE_INT
5367 && host_integerp (and1, 1))
5368 {
5369 unsigned HOST_WIDE_INT cst;
5370
5371 cst = tree_low_cst (and1, 1);
5372 cst &= (HOST_WIDE_INT) -1
5373 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
5374 change = (cst == 0);
5375 #ifdef LOAD_EXTEND_OP
5376 if (change
5377 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
5378 == ZERO_EXTEND))
5379 {
5380 tree uns = (*lang_hooks.types.unsigned_type) (TREE_TYPE (and0));
5381 and0 = convert (uns, and0);
5382 and1 = convert (uns, and1);
5383 }
5384 #endif
5385 }
5386 if (change)
5387 return fold (build (BIT_AND_EXPR, TREE_TYPE (t),
5388 convert (TREE_TYPE (t), and0),
5389 convert (TREE_TYPE (t), and1)));
5390 }
5391
5392 if (!wins)
5393 {
5394 if (TREE_CONSTANT (t) != TREE_CONSTANT (arg0))
5395 {
5396 if (t == orig_t)
5397 t = copy_node (t);
5398 TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
5399 }
5400 return t;
5401 }
5402 return fold_convert (t, arg0);
5403
5404 case VIEW_CONVERT_EXPR:
5405 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
5406 return build1 (VIEW_CONVERT_EXPR, type,
5407 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5408 return t;
5409
5410 case COMPONENT_REF:
5411 if (TREE_CODE (arg0) == CONSTRUCTOR
5412 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
5413 {
5414 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
5415 if (m)
5416 t = TREE_VALUE (m);
5417 }
5418 return t;
5419
5420 case RANGE_EXPR:
5421 if (TREE_CONSTANT (t) != wins)
5422 {
5423 if (t == orig_t)
5424 t = copy_node (t);
5425 TREE_CONSTANT (t) = wins;
5426 }
5427 return t;
5428
5429 case NEGATE_EXPR:
5430 if (wins)
5431 {
5432 if (TREE_CODE (arg0) == INTEGER_CST)
5433 {
5434 unsigned HOST_WIDE_INT low;
5435 HOST_WIDE_INT high;
5436 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5437 TREE_INT_CST_HIGH (arg0),
5438 &low, &high);
5439 t = build_int_2 (low, high);
5440 TREE_TYPE (t) = type;
5441 TREE_OVERFLOW (t)
5442 = (TREE_OVERFLOW (arg0)
5443 | force_fit_type (t, overflow && !TREE_UNSIGNED (type)));
5444 TREE_CONSTANT_OVERFLOW (t)
5445 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5446 }
5447 else if (TREE_CODE (arg0) == REAL_CST)
5448 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5449 }
5450 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5451 return TREE_OPERAND (arg0, 0);
5452 /* Convert -((double)float) into (double)(-float). */
5453 else if (TREE_CODE (arg0) == NOP_EXPR
5454 && TREE_CODE (type) == REAL_TYPE)
5455 {
5456 tree targ0 = strip_float_extensions (arg0);
5457 if (targ0 != arg0)
5458 return convert (type, build1 (NEGATE_EXPR, TREE_TYPE (targ0), targ0));
5459
5460 }
5461
5462 /* Convert - (a - b) to (b - a) for non-floating-point. */
5463 else if (TREE_CODE (arg0) == MINUS_EXPR
5464 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
5465 return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
5466 TREE_OPERAND (arg0, 0));
5467
5468 /* Convert -f(x) into f(-x) where f is sin, tan or atan. */
5469 switch (builtin_mathfn_code (arg0))
5470 {
5471 case BUILT_IN_SIN:
5472 case BUILT_IN_SINF:
5473 case BUILT_IN_SINL:
5474 case BUILT_IN_TAN:
5475 case BUILT_IN_TANF:
5476 case BUILT_IN_TANL:
5477 case BUILT_IN_ATAN:
5478 case BUILT_IN_ATANF:
5479 case BUILT_IN_ATANL:
5480 if (negate_expr_p (TREE_VALUE (TREE_OPERAND (arg0, 1))))
5481 {
5482 tree fndecl, arg, arglist;
5483
5484 fndecl = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
5485 arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5486 arg = fold (build1 (NEGATE_EXPR, type, arg));
5487 arglist = build_tree_list (NULL_TREE, arg);
5488 return build_function_call_expr (fndecl, arglist);
5489 }
5490 break;
5491
5492 default:
5493 break;
5494 }
5495 return t;
5496
5497 case ABS_EXPR:
5498 if (wins)
5499 {
5500 if (TREE_CODE (arg0) == INTEGER_CST)
5501 {
5502 /* If the value is unsigned, then the absolute value is
5503 the same as the ordinary value. */
5504 if (TREE_UNSIGNED (type))
5505 return arg0;
5506 /* Similarly, if the value is non-negative. */
5507 else if (INT_CST_LT (integer_minus_one_node, arg0))
5508 return arg0;
5509 /* If the value is negative, then the absolute value is
5510 its negation. */
5511 else
5512 {
5513 unsigned HOST_WIDE_INT low;
5514 HOST_WIDE_INT high;
5515 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5516 TREE_INT_CST_HIGH (arg0),
5517 &low, &high);
5518 t = build_int_2 (low, high);
5519 TREE_TYPE (t) = type;
5520 TREE_OVERFLOW (t)
5521 = (TREE_OVERFLOW (arg0)
5522 | force_fit_type (t, overflow));
5523 TREE_CONSTANT_OVERFLOW (t)
5524 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5525 }
5526 }
5527 else if (TREE_CODE (arg0) == REAL_CST)
5528 {
5529 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
5530 t = build_real (type,
5531 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5532 }
5533 }
5534 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5535 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
5536 /* Convert fabs((double)float) into (double)fabsf(float). */
5537 else if (TREE_CODE (arg0) == NOP_EXPR
5538 && TREE_CODE (type) == REAL_TYPE)
5539 {
5540 tree targ0 = strip_float_extensions (arg0);
5541 if (targ0 != arg0)
5542 return convert (type, fold (build1 (ABS_EXPR, TREE_TYPE (targ0),
5543 targ0)));
5544 }
5545 else if (tree_expr_nonnegative_p (arg0))
5546 return arg0;
5547 return t;
5548
5549 case CONJ_EXPR:
5550 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5551 return convert (type, arg0);
5552 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5553 return build (COMPLEX_EXPR, type,
5554 TREE_OPERAND (arg0, 0),
5555 negate_expr (TREE_OPERAND (arg0, 1)));
5556 else if (TREE_CODE (arg0) == COMPLEX_CST)
5557 return build_complex (type, TREE_REALPART (arg0),
5558 negate_expr (TREE_IMAGPART (arg0)));
5559 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5560 return fold (build (TREE_CODE (arg0), type,
5561 fold (build1 (CONJ_EXPR, type,
5562 TREE_OPERAND (arg0, 0))),
5563 fold (build1 (CONJ_EXPR,
5564 type, TREE_OPERAND (arg0, 1)))));
5565 else if (TREE_CODE (arg0) == CONJ_EXPR)
5566 return TREE_OPERAND (arg0, 0);
5567 return t;
5568
5569 case BIT_NOT_EXPR:
5570 if (wins)
5571 {
5572 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
5573 ~ TREE_INT_CST_HIGH (arg0));
5574 TREE_TYPE (t) = type;
5575 force_fit_type (t, 0);
5576 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
5577 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
5578 }
5579 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
5580 return TREE_OPERAND (arg0, 0);
5581 return t;
5582
5583 case PLUS_EXPR:
5584 /* A + (-B) -> A - B */
5585 if (TREE_CODE (arg1) == NEGATE_EXPR)
5586 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5587 /* (-A) + B -> B - A */
5588 if (TREE_CODE (arg0) == NEGATE_EXPR)
5589 return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
5590 else if (! FLOAT_TYPE_P (type))
5591 {
5592 if (integer_zerop (arg1))
5593 return non_lvalue (convert (type, arg0));
5594
5595 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
5596 with a constant, and the two constants have no bits in common,
5597 we should treat this as a BIT_IOR_EXPR since this may produce more
5598 simplifications. */
5599 if (TREE_CODE (arg0) == BIT_AND_EXPR
5600 && TREE_CODE (arg1) == BIT_AND_EXPR
5601 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5602 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5603 && integer_zerop (const_binop (BIT_AND_EXPR,
5604 TREE_OPERAND (arg0, 1),
5605 TREE_OPERAND (arg1, 1), 0)))
5606 {
5607 code = BIT_IOR_EXPR;
5608 goto bit_ior;
5609 }
5610
5611 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
5612 (plus (plus (mult) (mult)) (foo)) so that we can
5613 take advantage of the factoring cases below. */
5614 if ((TREE_CODE (arg0) == PLUS_EXPR
5615 && TREE_CODE (arg1) == MULT_EXPR)
5616 || (TREE_CODE (arg1) == PLUS_EXPR
5617 && TREE_CODE (arg0) == MULT_EXPR))
5618 {
5619 tree parg0, parg1, parg, marg;
5620
5621 if (TREE_CODE (arg0) == PLUS_EXPR)
5622 parg = arg0, marg = arg1;
5623 else
5624 parg = arg1, marg = arg0;
5625 parg0 = TREE_OPERAND (parg, 0);
5626 parg1 = TREE_OPERAND (parg, 1);
5627 STRIP_NOPS (parg0);
5628 STRIP_NOPS (parg1);
5629
5630 if (TREE_CODE (parg0) == MULT_EXPR
5631 && TREE_CODE (parg1) != MULT_EXPR)
5632 return fold (build (PLUS_EXPR, type,
5633 fold (build (PLUS_EXPR, type,
5634 convert (type, parg0),
5635 convert (type, marg))),
5636 convert (type, parg1)));
5637 if (TREE_CODE (parg0) != MULT_EXPR
5638 && TREE_CODE (parg1) == MULT_EXPR)
5639 return fold (build (PLUS_EXPR, type,
5640 fold (build (PLUS_EXPR, type,
5641 convert (type, parg1),
5642 convert (type, marg))),
5643 convert (type, parg0)));
5644 }
5645
5646 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
5647 {
5648 tree arg00, arg01, arg10, arg11;
5649 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
5650
5651 /* (A * C) + (B * C) -> (A+B) * C.
5652 We are most concerned about the case where C is a constant,
5653 but other combinations show up during loop reduction. Since
5654 it is not difficult, try all four possibilities. */
5655
5656 arg00 = TREE_OPERAND (arg0, 0);
5657 arg01 = TREE_OPERAND (arg0, 1);
5658 arg10 = TREE_OPERAND (arg1, 0);
5659 arg11 = TREE_OPERAND (arg1, 1);
5660 same = NULL_TREE;
5661
5662 if (operand_equal_p (arg01, arg11, 0))
5663 same = arg01, alt0 = arg00, alt1 = arg10;
5664 else if (operand_equal_p (arg00, arg10, 0))
5665 same = arg00, alt0 = arg01, alt1 = arg11;
5666 else if (operand_equal_p (arg00, arg11, 0))
5667 same = arg00, alt0 = arg01, alt1 = arg10;
5668 else if (operand_equal_p (arg01, arg10, 0))
5669 same = arg01, alt0 = arg00, alt1 = arg11;
5670
5671 /* No identical multiplicands; see if we can find a common
5672 power-of-two factor in non-power-of-two multiplies. This
5673 can help in multi-dimensional array access. */
5674 else if (TREE_CODE (arg01) == INTEGER_CST
5675 && TREE_CODE (arg11) == INTEGER_CST
5676 && TREE_INT_CST_HIGH (arg01) == 0
5677 && TREE_INT_CST_HIGH (arg11) == 0)
5678 {
5679 HOST_WIDE_INT int01, int11, tmp;
5680 int01 = TREE_INT_CST_LOW (arg01);
5681 int11 = TREE_INT_CST_LOW (arg11);
5682
5683 /* Move min of absolute values to int11. */
5684 if ((int01 >= 0 ? int01 : -int01)
5685 < (int11 >= 0 ? int11 : -int11))
5686 {
5687 tmp = int01, int01 = int11, int11 = tmp;
5688 alt0 = arg00, arg00 = arg10, arg10 = alt0;
5689 alt0 = arg01, arg01 = arg11, arg11 = alt0;
5690 }
5691
5692 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
5693 {
5694 alt0 = fold (build (MULT_EXPR, type, arg00,
5695 build_int_2 (int01 / int11, 0)));
5696 alt1 = arg10;
5697 same = arg11;
5698 }
5699 }
5700
5701 if (same)
5702 return fold (build (MULT_EXPR, type,
5703 fold (build (PLUS_EXPR, type, alt0, alt1)),
5704 same));
5705 }
5706 }
5707 else
5708 {
5709 /* See if ARG1 is zero and X + ARG1 reduces to X. */
5710 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
5711 return non_lvalue (convert (type, arg0));
5712
5713 /* Likewise if the operands are reversed. */
5714 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
5715 return non_lvalue (convert (type, arg1));
5716
5717 /* Convert x+x into x*2.0. */
5718 if (operand_equal_p (arg0, arg1, 0))
5719 return fold (build (MULT_EXPR, type, arg0,
5720 build_real (type, dconst2)));
5721
5722 /* Convert x*c+x into x*(c+1). */
5723 if (flag_unsafe_math_optimizations
5724 && TREE_CODE (arg0) == MULT_EXPR
5725 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
5726 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
5727 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
5728 {
5729 REAL_VALUE_TYPE c;
5730
5731 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
5732 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
5733 return fold (build (MULT_EXPR, type, arg1,
5734 build_real (type, c)));
5735 }
5736
5737 /* Convert x+x*c into x*(c+1). */
5738 if (flag_unsafe_math_optimizations
5739 && TREE_CODE (arg1) == MULT_EXPR
5740 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
5741 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
5742 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
5743 {
5744 REAL_VALUE_TYPE c;
5745
5746 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
5747 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
5748 return fold (build (MULT_EXPR, type, arg0,
5749 build_real (type, c)));
5750 }
5751
5752 /* Convert x*c1+x*c2 into x*(c1+c2). */
5753 if (flag_unsafe_math_optimizations
5754 && TREE_CODE (arg0) == MULT_EXPR
5755 && TREE_CODE (arg1) == MULT_EXPR
5756 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
5757 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
5758 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
5759 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
5760 && operand_equal_p (TREE_OPERAND (arg0, 0),
5761 TREE_OPERAND (arg1, 0), 0))
5762 {
5763 REAL_VALUE_TYPE c1, c2;
5764
5765 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
5766 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
5767 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
5768 return fold (build (MULT_EXPR, type,
5769 TREE_OPERAND (arg0, 0),
5770 build_real (type, c1)));
5771 }
5772 }
5773
    /* Shared tails reached by `goto' from several binary-operator case
       arms above (PLUS/IOR/XOR): first try to recognize a rotate idiom,
       then reassociate operand groups, then fall back to constant
       folding via const_binop.  */
    bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
	 is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
	 is a rotate of A by B bits.  */
      {
	enum tree_code code0, code1;
	code0 = TREE_CODE (arg0);
	code1 = TREE_CODE (arg1);
	/* Require one shift in each direction, the same shifted operand,
	   and an unsigned type (signed right shift would not be a rotate).  */
	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
	    && operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 0), 0)
	    && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	  {
	    tree tree01, tree11;
	    enum tree_code code01, code11;

	    /* tree01/tree11 are the two shift counts, with conversions
	       stripped.  */
	    tree01 = TREE_OPERAND (arg0, 1);
	    tree11 = TREE_OPERAND (arg1, 1);
	    STRIP_NOPS (tree01);
	    STRIP_NOPS (tree11);
	    code01 = TREE_CODE (tree01);
	    code11 = TREE_CODE (tree11);
	    /* Case 1: both counts are small constants summing to the
	       precision of A -> left rotate by the left-shift count.  */
	    if (code01 == INTEGER_CST
		&& code11 == INTEGER_CST
		&& TREE_INT_CST_HIGH (tree01) == 0
		&& TREE_INT_CST_HIGH (tree11) == 0
		&& ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
		    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
	      return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
			    code0 == LSHIFT_EXPR ? tree01 : tree11);
	    /* Case 2: second count has the form (Z - B) where Z is the
	       precision and B equals the first count.  */
	    else if (code11 == MINUS_EXPR)
	      {
		tree tree110, tree111;
		tree110 = TREE_OPERAND (tree11, 0);
		tree111 = TREE_OPERAND (tree11, 1);
		STRIP_NOPS (tree110);
		STRIP_NOPS (tree111);
		if (TREE_CODE (tree110) == INTEGER_CST
		    && 0 == compare_tree_int (tree110,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree01, tree111, 0))
		  return build ((code0 == LSHIFT_EXPR
				 ? LROTATE_EXPR
				 : RROTATE_EXPR),
				type, TREE_OPERAND (arg0, 0), tree01);
	      }
	    /* Case 3: mirror of case 2 -- the FIRST count is (Z - B).  */
	    else if (code01 == MINUS_EXPR)
	      {
		tree tree010, tree011;
		tree010 = TREE_OPERAND (tree01, 0);
		tree011 = TREE_OPERAND (tree01, 1);
		STRIP_NOPS (tree010);
		STRIP_NOPS (tree011);
		if (TREE_CODE (tree010) == INTEGER_CST
		    && 0 == compare_tree_int (tree010,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree11, tree011, 0))
		  return build ((code0 != LSHIFT_EXPR
				 ? LROTATE_EXPR
				 : RROTATE_EXPR),
				type, TREE_OPERAND (arg0, 0), tree11);
	      }
	  }
      }

    associate:
      /* In most languages, can't associate operations on floats through
	 parentheses.  Rather than remember where the parentheses were, we
	 don't associate floats at all, unless the user has specified
	 -funsafe-math-optimizations.  */

      /* NOTE(review): `wins' is set outside this chunk; it appears to be
	 nonzero when all operands are constants -- confirm before relying
	 on it.  */
      if (! wins
	  && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
	{
	  tree var0, con0, lit0, minus_lit0;
	  tree var1, con1, lit1, minus_lit1;

	  /* Split both trees into variables, constants, and literals.  Then
	     associate each group together, the constants with literals,
	     then the result with variables.  This increases the chances of
	     literals being recombined later and of generating relocatable
	     expressions for the sum of a constant and literal.  */
	  var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
	  var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
			     code == MINUS_EXPR);

	  /* Only do something if we found more than two objects.  Otherwise,
	     nothing has changed and we risk infinite recursion.  */
	  if (2 < ((var0 != 0) + (var1 != 0)
		   + (con0 != 0) + (con1 != 0)
		   + (lit0 != 0) + (lit1 != 0)
		   + (minus_lit0 != 0) + (minus_lit1 != 0)))
	    {
	      /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
	      if (code == MINUS_EXPR)
		code = PLUS_EXPR;

	      var0 = associate_trees (var0, var1, code, type);
	      con0 = associate_trees (con0, con1, code, type);
	      lit0 = associate_trees (lit0, lit1, code, type);
	      minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);

	      /* Preserve the MINUS_EXPR if the negative part of the literal is
		 greater than the positive part.  Otherwise, the multiplicative
		 folding code (i.e extract_muldiv) may be fooled in case
		 unsigned constants are subtracted, like in the following
		 example: ((X*2 + 4) - 8U)/2.  */
	      if (minus_lit0 && lit0)
		{
		  if (tree_int_cst_lt (lit0, minus_lit0))
		    {
		      minus_lit0 = associate_trees (minus_lit0, lit0,
						    MINUS_EXPR, type);
		      lit0 = 0;
		    }
		  else
		    {
		      lit0 = associate_trees (lit0, minus_lit0,
					      MINUS_EXPR, type);
		      minus_lit0 = 0;
		    }
		}
	      /* Any remaining negative literal is folded in with an explicit
		 subtraction so its sign is not lost in unsigned types.  */
	      if (minus_lit0)
		{
		  if (con0 == 0)
		    return convert (type, associate_trees (var0, minus_lit0,
							   MINUS_EXPR, type));
		  else
		    {
		      con0 = associate_trees (con0, minus_lit0,
					      MINUS_EXPR, type);
		      return convert (type, associate_trees (var0, con0,
							     PLUS_EXPR, type));
		    }
		}

	      con0 = associate_trees (con0, lit0, code, type);
	      return convert (type, associate_trees (var0, con0, code, type));
	    }
	}

    binary:
      /* Last resort: fold two constant operands with const_binop,
	 converting the result back to the expression's original type.  */
      if (wins)
	t1 = const_binop (code, arg0, arg1, 0);
      if (t1 != NULL_TREE)
	{
	  /* The return value should always have
	     the same type as the original expression.  */
	  if (TREE_TYPE (t1) != TREE_TYPE (t))
	    t1 = convert (TREE_TYPE (t), t1);

	  return t1;
	}
      return t;
5934
    /* Simplifications of subtraction: negation rewrites, identity with
       zero, distributing over a common multiplicand, bit tricks, and
       X - X -> 0.  Falls through to the `associate' tail.  */
    case MINUS_EXPR:
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.
	 Only valid for floats or for wrapping (non-trapping) integers,
	 and only when side effects cannot be reordered.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && (FLOAT_TYPE_P (type)
	      || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
	  && negate_expr_p (arg1)
	  && (! TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
	  && (! TREE_SIDE_EFFECTS (arg1) || TREE_CONSTANT (arg0)))
	return fold (build (MINUS_EXPR, type, negate_expr (arg1),
			    TREE_OPERAND (arg0, 0)));

      if (! FLOAT_TYPE_P (type))
	{
	  /* 0 - B -> -B (only when not already fully constant-folded).  */
	  if (! wins && integer_zerop (arg0))
	    return negate_expr (convert (type, arg1));
	  /* A - 0 -> A.  */
	  if (integer_zerop (arg1))
	    return non_lvalue (convert (type, arg0));

	  /* (A * C) - (B * C) -> (A-B) * C.  Since we are most concerned
	     about the case where C is a constant, just try one of the
	     four possibilities.  */

	  if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 1), 0))
	    return fold (build (MULT_EXPR, type,
				fold (build (MINUS_EXPR, type,
					     TREE_OPERAND (arg0, 0),
					     TREE_OPERAND (arg1, 0))),
				TREE_OPERAND (arg0, 1)));

	  /* Fold A - (A & B) into ~B & A.  Requires arg0 free of side
	     effects since it is duplicated.  */
	  if (!TREE_SIDE_EFFECTS (arg0)
	      && TREE_CODE (arg1) == BIT_AND_EXPR)
	    {
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
		return fold (build (BIT_AND_EXPR, type,
				    fold (build1 (BIT_NOT_EXPR, type,
						  TREE_OPERAND (arg1, 0))),
				    arg0));
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
		return fold (build (BIT_AND_EXPR, type,
				    fold (build1 (BIT_NOT_EXPR, type,
						  TREE_OPERAND (arg1, 1))),
				    arg0));
	    }
	}

      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
	return non_lvalue (convert (type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
	 ARG0 is zero and X + ARG0 reduces to X, since that would mean
	 (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	return negate_expr (convert (type, arg1));

      /* Fold &x - &x.  This can happen from &x.foo - &x.
	 This is unsafe for certain floats even in non-IEEE formats.
	 In IEEE, it is unsafe because it does wrong for NaNs.
	 Also note that operand_equal_p is always false if an operand
	 is volatile.  */

      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	  && operand_equal_p (arg0, arg1, 0))
	return convert (type, integer_zero_node);

      goto associate;
6007
    /* Simplifications of multiplication: negation pairs, identities,
       strength reduction to shifts, and (under -funsafe-math-optimizations)
       math-builtin rewrites such as sqrt(x)*sqrt(y) -> sqrt(x*y).
       Falls through to the `associate' tail.  */
    case MULT_EXPR:
      /* (-A) * (-B) -> A * B */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold (build (MULT_EXPR, type,
			    TREE_OPERAND (arg0, 0),
			    negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold (build (MULT_EXPR, type,
			    negate_expr (arg0),
			    TREE_OPERAND (arg1, 0)));

      if (! FLOAT_TYPE_P (type))
	{
	  /* A * 0 -> 0 (arg0 kept only for side effects); A * 1 -> A.  */
	  if (integer_zerop (arg1))
	    return omit_one_operand (type, arg1, arg0);
	  if (integer_onep (arg1))
	    return non_lvalue (convert (type, arg0));

	  /* (a * (1 << b)) is (a << b)  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg1, 0)))
	    return fold (build (LSHIFT_EXPR, type, arg0,
				TREE_OPERAND (arg1, 1)));
	  if (TREE_CODE (arg0) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg0, 0)))
	    return fold (build (LSHIFT_EXPR, type, arg1,
				TREE_OPERAND (arg0, 1)));

	  /* Let extract_muldiv try to distribute the constant factor
	     through the first operand.  */
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
					     convert (type, arg1),
					     code, NULL_TREE)))
	    return convert (type, tem);

	}
      else
	{
	  /* Maybe fold x * 0 to 0.  The expressions aren't the same
	     when x is NaN, since x * 0 is also NaN.  Nor are they the
	     same in modes with signed zeros, since multiplying a
	     negative value by 0 gives -0, not +0.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_zerop (arg1))
	    return omit_one_operand (type, arg1, arg0);
	  /* In IEEE floating point, x*1 is not equivalent to x for snans.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_onep (arg1))
	    return non_lvalue (convert (type, arg0));

	  /* Transform x * -1.0 into -x.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_minus_onep (arg1))
	    return fold (build1 (NEGATE_EXPR, type, arg0));

	  if (flag_unsafe_math_optimizations)
	    {
	      enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	      enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	      /* Optimizations of sqrt(...)*sqrt(...).  */
	      if ((fcode0 == BUILT_IN_SQRT && fcode1 == BUILT_IN_SQRT)
		  || (fcode0 == BUILT_IN_SQRTF && fcode1 == BUILT_IN_SQRTF)
		  || (fcode0 == BUILT_IN_SQRTL && fcode1 == BUILT_IN_SQRTL))
		{
		  tree sqrtfn, arg, arglist;
		  /* arg00/arg10 are the first (and only) arguments of the
		     two sqrt calls.  */
		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));

		  /* Optimize sqrt(x)*sqrt(x) as x.  */
		  if (operand_equal_p (arg00, arg10, 0)
		      && ! HONOR_SNANS (TYPE_MODE (type)))
		    return arg00;

		  /* Optimize sqrt(x)*sqrt(y) as sqrt(x*y).  */
		  sqrtfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		  arg = fold (build (MULT_EXPR, type, arg00, arg10));
		  arglist = build_tree_list (NULL_TREE, arg);
		  return build_function_call_expr (sqrtfn, arglist);
		}

	      /* Optimize exp(x)*exp(y) as exp(x+y).  */
	      if ((fcode0 == BUILT_IN_EXP && fcode1 == BUILT_IN_EXP)
		  || (fcode0 == BUILT_IN_EXPF && fcode1 == BUILT_IN_EXPF)
		  || (fcode0 == BUILT_IN_EXPL && fcode1 == BUILT_IN_EXPL))
		{
		  tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		  tree arg = build (PLUS_EXPR, type,
				    TREE_VALUE (TREE_OPERAND (arg0, 1)),
				    TREE_VALUE (TREE_OPERAND (arg1, 1)));
		  tree arglist = build_tree_list (NULL_TREE, fold (arg));
		  return build_function_call_expr (expfn, arglist);
		}

	      /* Optimizations of pow(...)*pow(...).  */
	      if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
		  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
		  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
		{
		  /* argN0 is each pow's base, argN1 its exponent.  */
		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
		  tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
								     1)));
		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
		  tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
								     1)));

		  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
		  if (operand_equal_p (arg01, arg11, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		      tree arg = build (MULT_EXPR, type, arg00, arg10);
		      tree arglist = tree_cons (NULL_TREE, fold (arg),
						build_tree_list (NULL_TREE,
								 arg01));
		      return build_function_call_expr (powfn, arglist);
		    }

		  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
		  if (operand_equal_p (arg00, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		      tree arg = fold (build (PLUS_EXPR, type, arg01, arg11));
		      tree arglist = tree_cons (NULL_TREE, arg00,
						build_tree_list (NULL_TREE,
								 arg));
		      return build_function_call_expr (powfn, arglist);
		    }
		}

	      /* Optimize tan(x)*cos(x) as sin(x), in either operand order
	         and for each float width.  */
	      if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
		   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
		   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
		   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
		   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
		   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
		  && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
				      TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
		{
		  tree sinfn;

		  switch (fcode0)
		    {
		    case BUILT_IN_TAN:
		    case BUILT_IN_COS:
		      sinfn = implicit_built_in_decls[BUILT_IN_SIN];
		      break;
		    case BUILT_IN_TANF:
		    case BUILT_IN_COSF:
		      sinfn = implicit_built_in_decls[BUILT_IN_SINF];
		      break;
		    case BUILT_IN_TANL:
		    case BUILT_IN_COSL:
		      sinfn = implicit_built_in_decls[BUILT_IN_SINL];
		      break;
		    default:
		      sinfn = NULL_TREE;
		    }

		  if (sinfn != NULL_TREE)
		    return build_function_call_expr (sinfn,
						     TREE_OPERAND (arg0, 1));
		}

	      /* Optimize x*pow(x,c) as pow(x,c+1).  */
	      if (fcode1 == BUILT_IN_POW
		  || fcode1 == BUILT_IN_POWF
		  || fcode1 == BUILT_IN_POWL)
		{
		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
		  tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
								     1)));
		  if (TREE_CODE (arg11) == REAL_CST
		      && ! TREE_CONSTANT_OVERFLOW (arg11)
		      && operand_equal_p (arg0, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
		      REAL_VALUE_TYPE c;
		      tree arg, arglist;

		      c = TREE_REAL_CST (arg11);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      arglist = build_tree_list (NULL_TREE, arg);
		      arglist = tree_cons (NULL_TREE, arg0, arglist);
		      return build_function_call_expr (powfn, arglist);
		    }
		}

	      /* Optimize pow(x,c)*x as pow(x,c+1).  */
	      if (fcode0 == BUILT_IN_POW
		  || fcode0 == BUILT_IN_POWF
		  || fcode0 == BUILT_IN_POWL)
		{
		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
		  tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
								     1)));
		  if (TREE_CODE (arg01) == REAL_CST
		      && ! TREE_CONSTANT_OVERFLOW (arg01)
		      && operand_equal_p (arg1, arg00, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		      REAL_VALUE_TYPE c;
		      tree arg, arglist;

		      c = TREE_REAL_CST (arg01);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      arglist = build_tree_list (NULL_TREE, arg);
		      arglist = tree_cons (NULL_TREE, arg1, arglist);
		      return build_function_call_expr (powfn, arglist);
		    }
		}

	      /* Optimize x*x as pow(x,2.0), which is expanded as x*x.  */
	      if (! optimize_size
		  && operand_equal_p (arg0, arg1, 0))
		{
		  tree powfn;

		  if (type == double_type_node)
		    powfn = implicit_built_in_decls[BUILT_IN_POW];
		  else if (type == float_type_node)
		    powfn = implicit_built_in_decls[BUILT_IN_POWF];
		  else if (type == long_double_type_node)
		    powfn = implicit_built_in_decls[BUILT_IN_POWL];
		  else
		    powfn = NULL_TREE;

		  if (powfn)
		    {
		      tree arg = build_real (type, dconst2);
		      tree arglist = build_tree_list (NULL_TREE, arg);
		      arglist = tree_cons (NULL_TREE, arg0, arglist);
		      return build_function_call_expr (powfn, arglist);
		    }
		}
	    }
	}
      goto associate;
6248
    /* Bitwise OR: identities with all-ones and zero, distribution, and
       De Morgan rewrite.  The `bit_ior' label is entered by goto from the
       XOR arm below when the XOR can be treated as an OR.  */
    case BIT_IOR_EXPR:
    bit_ior:
      /* A | ~0 -> ~0 (keep arg0 only for side effects).  */
      if (integer_all_onesp (arg1))
	return omit_one_operand (type, arg1, arg0);
      /* A | 0 -> A.  */
      if (integer_zerop (arg1))
	return non_lvalue (convert (type, arg0));
      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;

      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

	 This results in more efficient code for machines without a NAND
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NAND instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold (build1 (BIT_NOT_EXPR, type,
			       build (BIT_AND_EXPR, type,
				      TREE_OPERAND (arg0, 0),
				      TREE_OPERAND (arg1, 0))));
	}

      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;
6277
    /* Bitwise XOR: identities with zero and all-ones, then a conversion
       to OR when the operands provably share no set bits.  */
    case BIT_XOR_EXPR:
      /* A ^ 0 -> A.  */
      if (integer_zerop (arg1))
	return non_lvalue (convert (type, arg0));
      /* A ^ ~0 -> ~A.  */
      if (integer_all_onesp (arg1))
	return fold (build1 (BIT_NOT_EXPR, type, arg0));

      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
	 with a constant, and the two constants have no bits in common,
	 we should treat this as a BIT_IOR_EXPR since this may produce more
	 simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (BIT_AND_EXPR,
					 TREE_OPERAND (arg0, 1),
					 TREE_OPERAND (arg1, 1), 0)))
	{
	  code = BIT_IOR_EXPR;
	  goto bit_ior;
	}

      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;
6303
    /* Bitwise AND: identities, distribution, redundant-mask removal after
       a widening conversion, and De Morgan rewrite.  The `bit_and' label
       is entered by goto from the BIT_ANDTC arm below.  */
    case BIT_AND_EXPR:
    bit_and:
      /* A & ~0 -> A; A & 0 -> 0 (keep arg0 only for side effects).  */
      if (integer_all_onesp (arg1))
	return non_lvalue (convert (type, arg0));
      if (integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
	  && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	{
	  unsigned int prec
	    = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

	  /* The mask is redundant when it covers every bit the narrow
	     unsigned source type can produce.  */
	  if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
	      && (~TREE_INT_CST_LOW (arg1)
		  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
	    return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
	}

      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

	 This results in more efficient code for machines without a NOR
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NOR instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold (build1 (BIT_NOT_EXPR, type,
			       build (BIT_IOR_EXPR, type,
				      TREE_OPERAND (arg0, 0),
				      TREE_OPERAND (arg1, 0))));
	}

      goto associate;
6342
    /* AND-with-complement (A & ~B): identities, then reduction to a plain
       BIT_AND_EXPR when the complemented operand is a constant.  */
    case BIT_ANDTC_EXPR:
      /* ~0 & ... -> arg1; 0 & ... -> 0 (keep arg1 only for side effects).  */
      if (integer_all_onesp (arg0))
	return non_lvalue (convert (type, arg1));
      if (integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);
      if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  /* Fold the complement now and retry as an ordinary AND.  */
	  arg1 = fold (build1 (BIT_NOT_EXPR, type, arg1));
	  code = BIT_AND_EXPR;
	  goto bit_and;
	}
      goto binary;
6355
    /* Floating-point division: negation pairs, identities, reciprocal
       multiplication, reassociation of nested divides, and (unsafe-math
       only) builtin rewrites like sin/cos -> tan.  Falls through to the
       `binary' tail.  */
    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
	 of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
	  && real_zerop (arg1))
	return t;

      /* (-A) / (-B) -> A / B */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold (build (RDIV_EXPR, type,
			    TREE_OPERAND (arg0, 0),
			    negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold (build (RDIV_EXPR, type,
			    negate_expr (arg0),
			    TREE_OPERAND (arg1, 0)));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_onep (arg1))
	return non_lvalue (convert (type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_minus_onep (arg1))
	return non_lvalue (convert (type, negate_expr (arg0)));

      /* If ARG1 is a constant, we can convert this to a multiply by the
	 reciprocal.  This does not have the same rounding properties,
	 so only do this if -funsafe-math-optimizations.  We can actually
	 always safely do it if ARG1 is a power of two, but it's hard to
	 tell if it is or not in a portable manner.  */
      if (TREE_CODE (arg1) == REAL_CST)
	{
	  if (flag_unsafe_math_optimizations
	      && 0 != (tem = const_binop (code, build_real (type, dconst1),
					  arg1, 0)))
	    return fold (build (MULT_EXPR, type, arg0, tem));
	  /* Find the reciprocal if optimizing and the result is exact.  */
	  else if (optimize)
	    {
	      REAL_VALUE_TYPE r;
	      r = TREE_REAL_CST (arg1);
	      if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
		{
		  tem = build_real (type, r);
		  return fold (build (MULT_EXPR, type, arg0, tem));
		}
	    }
	}
      /* Convert A/B/C to A/(B*C).  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg0) == RDIV_EXPR)
	{
	  return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
			      build (MULT_EXPR, type, TREE_OPERAND (arg0, 1),
				     arg1)));
	}
      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == RDIV_EXPR)
	{
	  return fold (build (MULT_EXPR, type,
			      build (RDIV_EXPR, type, arg0,
				     TREE_OPERAND (arg1, 0)),
			      TREE_OPERAND (arg1, 1)));
	}

      if (flag_unsafe_math_optimizations)
	{
	  enum built_in_function fcode = builtin_mathfn_code (arg1);
	  /* Optimize x/exp(y) into x*exp(-y).  */
	  if (fcode == BUILT_IN_EXP
	      || fcode == BUILT_IN_EXPF
	      || fcode == BUILT_IN_EXPL)
	    {
	      tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
	      tree arg = build1 (NEGATE_EXPR, type,
				 TREE_VALUE (TREE_OPERAND (arg1, 1)));
	      tree arglist = build_tree_list (NULL_TREE, fold (arg));
	      arg1 = build_function_call_expr (expfn, arglist);
	      return fold (build (MULT_EXPR, type, arg0, arg1));
	    }

	  /* Optimize x/pow(y,z) into x*pow(y,-z).  */
	  if (fcode == BUILT_IN_POW
	      || fcode == BUILT_IN_POWF
	      || fcode == BUILT_IN_POWL)
	    {
	      tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
	      tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
	      tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
	      tree neg11 = fold (build1 (NEGATE_EXPR, type, arg11));
	      tree arglist = tree_cons(NULL_TREE, arg10,
				       build_tree_list (NULL_TREE, neg11));
	      arg1 = build_function_call_expr (powfn, arglist);
	      return fold (build (MULT_EXPR, type, arg0, arg1));
	    }
	}

      if (flag_unsafe_math_optimizations)
	{
	  enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	  enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	  /* Optimize sin(x)/cos(x) as tan(x).  */
	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
	      && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
				  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
	    {
	      tree tanfn;

	      if (fcode0 == BUILT_IN_SIN)
		tanfn = implicit_built_in_decls[BUILT_IN_TAN];
	      else if (fcode0 == BUILT_IN_SINF)
		tanfn = implicit_built_in_decls[BUILT_IN_TANF];
	      else if (fcode0 == BUILT_IN_SINL)
		tanfn = implicit_built_in_decls[BUILT_IN_TANL];
	      else
		tanfn = NULL_TREE;

	      if (tanfn != NULL_TREE)
		return build_function_call_expr (tanfn,
						 TREE_OPERAND (arg0, 1));
	    }

	  /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
	  if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
	       || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
	       || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
	      && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
				  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
	    {
	      tree tanfn;

	      if (fcode0 == BUILT_IN_COS)
		tanfn = implicit_built_in_decls[BUILT_IN_TAN];
	      else if (fcode0 == BUILT_IN_COSF)
		tanfn = implicit_built_in_decls[BUILT_IN_TANF];
	      else if (fcode0 == BUILT_IN_COSL)
		tanfn = implicit_built_in_decls[BUILT_IN_TANL];
	      else
		tanfn = NULL_TREE;

	      if (tanfn != NULL_TREE)
		{
		  /* Reuse cos's argument list as the argument of tan.  */
		  tree tmp = TREE_OPERAND (arg0, 1);
		  tmp = build_function_call_expr (tanfn, tmp);
		  return fold (build (RDIV_EXPR, type,
				      build_real (type, dconst1),
				      tmp));
		}
	    }

	  /* Optimize pow(x,c)/x as pow(x,c-1).  */
	  if (fcode0 == BUILT_IN_POW
	      || fcode0 == BUILT_IN_POWF
	      || fcode0 == BUILT_IN_POWL)
	    {
	      tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
	      tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
	      if (TREE_CODE (arg01) == REAL_CST
		  && ! TREE_CONSTANT_OVERFLOW (arg01)
		  && operand_equal_p (arg1, arg00, 0))
		{
		  tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		  REAL_VALUE_TYPE c;
		  tree arg, arglist;

		  c = TREE_REAL_CST (arg01);
		  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
		  arg = build_real (type, c);
		  arglist = build_tree_list (NULL_TREE, arg);
		  arglist = tree_cons (NULL_TREE, arg1, arglist);
		  return build_function_call_expr (powfn, arglist);
		}
	    }
	}
      goto binary;
6538
    /* Integer division variants: identities, rewriting exact divisions
       to EXACT_DIV_EXPR, and constant distribution via extract_muldiv.
       Falls through to the `binary' tail.  */
    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* A / 1 -> A; division by zero is left untouched.  */
      if (integer_onep (arg1))
	return non_lvalue (convert (type, arg0));
      if (integer_zerop (arg1))
	return t;

      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
	 operation, EXACT_DIV_EXPR.

	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
	 At one time others generated faster code, it's not clear if they do
	 after the last round to changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
	  && multiple_of_p (type, arg0, arg1))
	return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));

      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
					 code, NULL_TREE)))
	return convert (type, tem);

      goto binary;
6565
    /* Integer modulo variants: A % 1 -> 0, modulo by zero left untouched,
       constant simplification via extract_muldiv.  Falls through to the
       `binary' tail.  */
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* A % 1 -> 0 (keep arg0 only for side effects).  */
      if (integer_onep (arg1))
	return omit_one_operand (type, integer_zero_node, arg0);
      if (integer_zerop (arg1))
	return t;

      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
					 code, NULL_TREE)))
	return convert (type, tem);

      goto binary;
6581
    /* Rotates and shifts.  The rotate cases jump to `shift:' which is
       shared with LSHIFT_EXPR; RSHIFT_EXPR deliberately falls through
       into LSHIFT_EXPR after its one special case.  */
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* Rotating an all-ones value yields all-ones for any count.  */
      if (integer_all_onesp (arg0))
	return omit_one_operand (type, arg0, arg1);
      goto shift;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && ! TREE_UNSIGNED (type))
	return omit_one_operand (type, arg0, arg1);
      /* ... fall through ...  */

    case LSHIFT_EXPR:
    shift:
      /* A << 0 / A >> 0 -> A; 0 shifted by anything -> 0.  */
      if (integer_zerop (arg1))
	return non_lvalue (convert (type, arg0));
      if (integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);

      /* Since negative shift count is not well-defined,
	 don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
	return t;
      /* Rewrite an LROTATE_EXPR by a constant into an
	 RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
	{
	  /* Copy-on-write: `orig_t' is the tree as passed in; don't
	     mutate it in place.  */
	  if (t == orig_t)
	    t = copy_node (t);
	  TREE_SET_CODE (t, RROTATE_EXPR);
	  code = RROTATE_EXPR;
	  TREE_OPERAND (t, 1) = arg1
	    = const_binop
	      (MINUS_EXPR,
	       convert (TREE_TYPE (arg1),
			build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0)),
	       arg1, 0);
	  /* Give up rather than continue with a negative rotate count.  */
	  if (tree_int_cst_sgn (arg1) < 0)
	    return t;
	}

      /* If we have a rotate of a bit operation with the rotate count and
	 the second operand of the bit operation both constant,
	 permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == BIT_AND_EXPR
	      || TREE_CODE (arg0) == BIT_ANDTC_EXPR
	      || TREE_CODE (arg0) == BIT_IOR_EXPR
	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold (build (TREE_CODE (arg0), type,
			    fold (build (code, type,
					 TREE_OPERAND (arg0, 0), arg1)),
			    fold (build (code, type,
					 TREE_OPERAND (arg0, 1), arg1))));

      /* Two consecutive rotates adding up to the width of the mode can
	 be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == RROTATE_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (arg1) == 0
	  && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
	  && ((TREE_INT_CST_LOW (arg1)
	       + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
	      == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
	return TREE_OPERAND (arg0, 0);

      goto binary;
6651
    /* MIN (A, A) -> A; MIN (A, TYPE_MIN) -> TYPE_MIN.  Falls through to
       the `associate' tail.  */
    case MIN_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
	return omit_one_operand (type, arg1, arg0);
      goto associate;
6659
    case MAX_EXPR:
      /* MAX (X, X) == X; keep one copy but preserve the side effects
	 of the dropped operand via omit_one_operand.  */
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, arg0, arg1);
      /* MAX (X, TYPE_MAX) == TYPE_MAX.  Some integral types apparently
	 arrive without a TYPE_MAX_VALUE, hence the explicit NULL
	 check before dereferencing it.  */
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_MAX_VALUE (type)
	  && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
	return omit_one_operand (type, arg1, arg0);
      goto associate;
6668
    case TRUTH_NOT_EXPR:
      /* Note that the operand of this must be an int
	 and its values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language,
	 but we don't handle values other than 1 correctly yet.)  */
      tem = invert_truthvalue (arg0);
      /* Avoid infinite recursion: invert_truthvalue signals that it
	 could make no progress by handing back another TRUTH_NOT_EXPR.
	 NOTE(review): fold_single_bit_test is passed ARG1 even though
	 TRUTH_NOT_EXPR is unary — presumably it bails out unless CODE
	 is an EQ/NE comparison; confirm against its definition.  */
      if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
	{
	  tem = fold_single_bit_test (code, arg0, arg1, type);
	  if (tem)
	    return tem;
	  return t;
	}
      return convert (type, tem);
6684
    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it: && short-circuits,
	 so the second operand is never evaluated.  */
      if (integer_zerop (arg0))
	return convert (type, arg0);
      /* Fall through: the remaining simplifications are shared with
	 the non-short-circuit TRUTH_AND_EXPR.  */
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return non_lvalue (convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue (convert (type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
	 must be evaluated.  */
      if (integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
	 case will be handled here (the ANDIF case returned above).  */
      if (integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);

    truth_andor:
      /* We only do these simplifications if we are optimizing.  */
      if (!optimize)
	return t;

      /* Check for things like (A || B) && (A || C).  We can convert this
	 to A || (B && C).  Note that either operator can be any of the four
	 truth and/or operations and the transformation will still be
	 valid.   Also note that we only care about order for the
	 ANDIF and ORIF operators.  If B contains side effects, this
	 might change the truth-value of A.  */
      if (TREE_CODE (arg0) == TREE_CODE (arg1)
	  && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
	      || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
	      || TREE_CODE (arg0) == TRUTH_AND_EXPR
	      || TREE_CODE (arg0) == TRUTH_OR_EXPR)
	  && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
	{
	  tree a00 = TREE_OPERAND (arg0, 0);
	  tree a01 = TREE_OPERAND (arg0, 1);
	  tree a10 = TREE_OPERAND (arg1, 0);
	  tree a11 = TREE_OPERAND (arg1, 1);
	  /* The inner operator commutes for this purpose only when
	     neither it nor the outer operator short-circuits.  */
	  int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
			      || TREE_CODE (arg0) == TRUTH_AND_EXPR)
			     && (code == TRUTH_AND_EXPR
				 || code == TRUTH_OR_EXPR));

	  if (operand_equal_p (a00, a10, 0))
	    return fold (build (TREE_CODE (arg0), type, a00,
				fold (build (code, type, a01, a11))));
	  else if (commutative && operand_equal_p (a00, a11, 0))
	    return fold (build (TREE_CODE (arg0), type, a00,
				fold (build (code, type, a01, a10))));
	  else if (commutative && operand_equal_p (a01, a10, 0))
	    return fold (build (TREE_CODE (arg0), type, a01,
				fold (build (code, type, a00, a11))));

	  /* This case is tricky because we must either have commutative
	     operators or else A10 must not have side-effects.  */

	  else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
		   && operand_equal_p (a01, a11, 0))
	    return fold (build (TREE_CODE (arg0), type,
				fold (build (code, type, a00, a10)),
				a01));
	}

      /* See if we can build a range comparison.  */
      if (0 != (tem = fold_range_test (t)))
	return tem;

      /* Check for the possibility of merging component references.  If our
	 lhs is another similar operation, try to merge its rhs with our
	 rhs.  Then try to merge our lhs and rhs.  */
      if (TREE_CODE (arg0) == code
	  && 0 != (tem = fold_truthop (code, type,
				       TREE_OPERAND (arg0, 1), arg1)))
	return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));

      if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
	return tem;

      return t;
6772
    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or true.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it: || short-circuits,
	 so the second operand is never evaluated.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return convert (type, arg0);
      /* Fall through: the remaining simplifications are shared with
	 the non-short-circuit TRUTH_OR_EXPR.  */
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
	return non_lvalue (convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue (convert (type, arg0));
      /* If second arg is constant true, result is true, but we must
	 evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
	 TRUTH_OR_EXPR (the ORIF case returned above).  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);
      goto truth_andor;
6797
    case TRUTH_XOR_EXPR:
      /* XOR has no short-circuit form, so both operands are always
	 evaluated and plain convert/non_lvalue is sufficient.  */
      /* If either arg is constant zero, drop it.  */
      if (integer_zerop (arg0))
	return non_lvalue (convert (type, arg1));
      if (integer_zerop (arg1))
	return non_lvalue (convert (type, arg0));
      /* If either arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg0))
	return non_lvalue (convert (type, invert_truthvalue (arg1)));
      if (integer_onep (arg1))
	return non_lvalue (convert (type, invert_truthvalue (arg0)));
      return t;
6810
6811 case EQ_EXPR:
6812 case NE_EXPR:
6813 case LT_EXPR:
6814 case GT_EXPR:
6815 case LE_EXPR:
6816 case GE_EXPR:
6817 /* If one arg is a real or integer constant, put it last. */
6818 if ((TREE_CODE (arg0) == INTEGER_CST
6819 && TREE_CODE (arg1) != INTEGER_CST)
6820 || (TREE_CODE (arg0) == REAL_CST
6821 && TREE_CODE (arg0) != REAL_CST))
6822 {
6823 if (t == orig_t)
6824 t = copy_node (t);
6825 TREE_OPERAND (t, 0) = arg1;
6826 TREE_OPERAND (t, 1) = arg0;
6827 arg0 = TREE_OPERAND (t, 0);
6828 arg1 = TREE_OPERAND (t, 1);
6829 code = swap_tree_comparison (code);
6830 TREE_SET_CODE (t, code);
6831 }
6832
      /* The following group of transformations applies only to
	 floating-point comparisons.  */
      if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
	{
	  tree targ0 = strip_float_extensions (arg0);
	  tree targ1 = strip_float_extensions (arg1);
	  /* NEWTYPE is the wider of the two unextended operand types.  */
	  tree newtype = TREE_TYPE (targ0);

	  if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	    newtype = TREE_TYPE (targ1);

	  /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
	  if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	    return fold (build (code, type, convert (newtype, targ0),
				convert (newtype, targ1)));

	  /* (-a) CMP (-b) -> b CMP a  */
	  if (TREE_CODE (arg0) == NEGATE_EXPR
	      && TREE_CODE (arg1) == NEGATE_EXPR)
	    return fold (build (code, type, TREE_OPERAND (arg1, 0),
				TREE_OPERAND (arg0, 0)));

	  if (TREE_CODE (arg1) == REAL_CST)
	    {
	      REAL_VALUE_TYPE cst;
	      cst = TREE_REAL_CST (arg1);

	      /* (-a) CMP CST -> a swap(CMP) (-CST)  */
	      if (TREE_CODE (arg0) == NEGATE_EXPR)
		return
		  fold (build (swap_tree_comparison (code), type,
			       TREE_OPERAND (arg0, 0),
			       build_real (TREE_TYPE (arg1),
					   REAL_VALUE_NEGATE (cst))));

	      /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
	      /* a CMP (-0) -> a CMP 0  */
	      if (REAL_VALUE_MINUS_ZERO (cst))
		return fold (build (code, type, arg0,
				    build_real (TREE_TYPE (arg1), dconst0)));

	      /* x != NaN is always true, other ops are always false;
	         only skipped when signaling NaNs must be honored.  */
	      if (REAL_VALUE_ISNAN (cst)
		  && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
		{
		  t = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
		  return omit_one_operand (type, convert (type, t), arg0);
		}

	      /* Fold comparisons against infinity.  */
	      if (REAL_VALUE_ISINF (cst))
		{
		  tem = fold_inf_compare (code, type, arg0, arg1);
		  if (tem != NULL_TREE)
		    return tem;
		}
	    }

	  /* If this is a comparison of a real constant with a PLUS_EXPR
	     or a MINUS_EXPR of a real constant, we can convert it into a
	     comparison with a revised real constant as long as no overflow
	     occurs when unsafe_math_optimizations are enabled.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == REAL_CST
	      && (TREE_CODE (arg0) == PLUS_EXPR
		  || TREE_CODE (arg0) == MINUS_EXPR)
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	      && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
					  ? MINUS_EXPR : PLUS_EXPR,
					  arg1, TREE_OPERAND (arg0, 1), 0))
	      && ! TREE_CONSTANT_OVERFLOW (tem))
	    return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));

	  /* Likewise, we can simplify a comparison of a real constant with
	     a MINUS_EXPR whose first operand is also a real constant, i.e.
	     (c1 - x) < c2 becomes x > c1-c2.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == REAL_CST
	      && TREE_CODE (arg0) == MINUS_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
	      && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
					  arg1, 0))
	      && ! TREE_CONSTANT_OVERFLOW (tem))
	    return fold (build (swap_tree_comparison (code), type,
				TREE_OPERAND (arg0, 1), tem));

	  /* Fold comparisons against built-in math functions.  */
	  if (TREE_CODE (arg1) == REAL_CST
	      && flag_unsafe_math_optimizations
	      && ! flag_errno_math)
	    {
	      enum built_in_function fcode = builtin_mathfn_code (arg0);

	      if (fcode != END_BUILTINS)
		{
		  tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
		  if (tem != NULL_TREE)
		    return tem;
		}
	    }
	}
6932
6933 /* Convert foo++ == CONST into ++foo == CONST + INCR.
6934 First, see if one arg is constant; find the constant arg
6935 and the other one. */
6936 {
6937 tree constop = 0, varop = NULL_TREE;
6938 int constopnum = -1;
6939
6940 if (TREE_CONSTANT (arg1))
6941 constopnum = 1, constop = arg1, varop = arg0;
6942 if (TREE_CONSTANT (arg0))
6943 constopnum = 0, constop = arg0, varop = arg1;
6944
6945 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
6946 {
6947 /* This optimization is invalid for ordered comparisons
6948 if CONST+INCR overflows or if foo+incr might overflow.
6949 This optimization is invalid for floating point due to rounding.
6950 For pointer types we assume overflow doesn't happen. */
6951 if (POINTER_TYPE_P (TREE_TYPE (varop))
6952 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
6953 && (code == EQ_EXPR || code == NE_EXPR)))
6954 {
6955 tree newconst
6956 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
6957 constop, TREE_OPERAND (varop, 1)));
6958
6959 /* Do not overwrite the current varop to be a preincrement,
6960 create a new node so that we won't confuse our caller who
6961 might create trees and throw them away, reusing the
6962 arguments that they passed to build. This shows up in
6963 the THEN or ELSE parts of ?: being postincrements. */
6964 varop = build (PREINCREMENT_EXPR, TREE_TYPE (varop),
6965 TREE_OPERAND (varop, 0),
6966 TREE_OPERAND (varop, 1));
6967
6968 /* If VAROP is a reference to a bitfield, we must mask
6969 the constant by the width of the field. */
6970 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
6971 && DECL_BIT_FIELD(TREE_OPERAND
6972 (TREE_OPERAND (varop, 0), 1)))
6973 {
6974 int size
6975 = TREE_INT_CST_LOW (DECL_SIZE
6976 (TREE_OPERAND
6977 (TREE_OPERAND (varop, 0), 1)));
6978 tree mask, unsigned_type;
6979 unsigned int precision;
6980 tree folded_compare;
6981
6982 /* First check whether the comparison would come out
6983 always the same. If we don't do that we would
6984 change the meaning with the masking. */
6985 if (constopnum == 0)
6986 folded_compare = fold (build (code, type, constop,
6987 TREE_OPERAND (varop, 0)));
6988 else
6989 folded_compare = fold (build (code, type,
6990 TREE_OPERAND (varop, 0),
6991 constop));
6992 if (integer_zerop (folded_compare)
6993 || integer_onep (folded_compare))
6994 return omit_one_operand (type, folded_compare, varop);
6995
6996 unsigned_type = (*lang_hooks.types.type_for_size)(size, 1);
6997 precision = TYPE_PRECISION (unsigned_type);
6998 mask = build_int_2 (~0, ~0);
6999 TREE_TYPE (mask) = unsigned_type;
7000 force_fit_type (mask, 0);
7001 mask = const_binop (RSHIFT_EXPR, mask,
7002 size_int (precision - size), 0);
7003 newconst = fold (build (BIT_AND_EXPR,
7004 TREE_TYPE (varop), newconst,
7005 convert (TREE_TYPE (varop),
7006 mask)));
7007 }
7008
7009 t = build (code, type,
7010 (constopnum == 0) ? newconst : varop,
7011 (constopnum == 1) ? newconst : varop);
7012 return t;
7013 }
7014 }
7015 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
7016 {
7017 if (POINTER_TYPE_P (TREE_TYPE (varop))
7018 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
7019 && (code == EQ_EXPR || code == NE_EXPR)))
7020 {
7021 tree newconst
7022 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
7023 constop, TREE_OPERAND (varop, 1)));
7024
7025 /* Do not overwrite the current varop to be a predecrement,
7026 create a new node so that we won't confuse our caller who
7027 might create trees and throw them away, reusing the
7028 arguments that they passed to build. This shows up in
7029 the THEN or ELSE parts of ?: being postdecrements. */
7030 varop = build (PREDECREMENT_EXPR, TREE_TYPE (varop),
7031 TREE_OPERAND (varop, 0),
7032 TREE_OPERAND (varop, 1));
7033
7034 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7035 && DECL_BIT_FIELD(TREE_OPERAND
7036 (TREE_OPERAND (varop, 0), 1)))
7037 {
7038 int size
7039 = TREE_INT_CST_LOW (DECL_SIZE
7040 (TREE_OPERAND
7041 (TREE_OPERAND (varop, 0), 1)));
7042 tree mask, unsigned_type;
7043 unsigned int precision;
7044 tree folded_compare;
7045
7046 if (constopnum == 0)
7047 folded_compare = fold (build (code, type, constop,
7048 TREE_OPERAND (varop, 0)));
7049 else
7050 folded_compare = fold (build (code, type,
7051 TREE_OPERAND (varop, 0),
7052 constop));
7053 if (integer_zerop (folded_compare)
7054 || integer_onep (folded_compare))
7055 return omit_one_operand (type, folded_compare, varop);
7056
7057 unsigned_type = (*lang_hooks.types.type_for_size)(size, 1);
7058 precision = TYPE_PRECISION (unsigned_type);
7059 mask = build_int_2 (~0, ~0);
7060 TREE_TYPE (mask) = TREE_TYPE (varop);
7061 force_fit_type (mask, 0);
7062 mask = const_binop (RSHIFT_EXPR, mask,
7063 size_int (precision - size), 0);
7064 newconst = fold (build (BIT_AND_EXPR,
7065 TREE_TYPE (varop), newconst,
7066 convert (TREE_TYPE (varop),
7067 mask)));
7068 }
7069
7070 t = build (code, type,
7071 (constopnum == 0) ? newconst : varop,
7072 (constopnum == 1) ? newconst : varop);
7073 return t;
7074 }
7075 }
7076 }
7077
      /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
	 This transformation affects the cases which are handled in later
	 optimizations involving comparisons with non-negative constants.
	 Since C > 0 is required, C - 1 cannot wrap around.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) != INTEGER_CST
	  && tree_int_cst_sgn (arg1) > 0)
	{
	  switch (code)
	    {
	    case GE_EXPR:
	      code = GT_EXPR;
	      arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
	      t = build (code, type, TREE_OPERAND (t, 0), arg1);
	      break;

	    case LT_EXPR:
	      code = LE_EXPR;
	      arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
	      t = build (code, type, TREE_OPERAND (t, 0), arg1);
	      break;

	    default:
	      break;
	    }
	}
7103
      /* Comparisons with the highest or lowest possible integer of
	 the specified size will have known values.  Only types whose
	 constants fit in one HOST_WIDE_INT are handled.  */
      {
	int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));

	if (TREE_CODE (arg1) == INTEGER_CST
	    && ! TREE_CONSTANT_OVERFLOW (arg1)
	    && width <= HOST_BITS_PER_WIDE_INT
	    && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
		|| POINTER_TYPE_P (TREE_TYPE (arg1))))
	  {
	    unsigned HOST_WIDE_INT signed_max;
	    unsigned HOST_WIDE_INT max, min;

	    signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;

	    if (TREE_UNSIGNED (TREE_TYPE (arg1)))
	      {
		max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		min = 0;
	      }
	    else
	      {
		max = signed_max;
		min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
	      }

	    /* ARG1 == maximum value of its type.  */
	    if (TREE_INT_CST_HIGH (arg1) == 0
		&& TREE_INT_CST_LOW (arg1) == max)
	      switch (code)
		{
		case GT_EXPR:
		  return omit_one_operand (type,
					   convert (type, integer_zero_node),
					   arg0);
		case GE_EXPR:
		  code = EQ_EXPR;
		  if (t == orig_t)
		    t = copy_node (t);
		  TREE_SET_CODE (t, EQ_EXPR);
		  break;
		case LE_EXPR:
		  return omit_one_operand (type,
					   convert (type, integer_one_node),
					   arg0);
		case LT_EXPR:
		  code = NE_EXPR;
		  if (t == orig_t)
		    t = copy_node (t);
		  TREE_SET_CODE (t, NE_EXPR);
		  break;

		/* The GE_EXPR and LT_EXPR cases above are not normally
		   reached because of previous transformations.  */

		default:
		  break;
		}
	    /* ARG1 == maximum value minus one.  */
	    else if (TREE_INT_CST_HIGH (arg1) == 0
		     && TREE_INT_CST_LOW (arg1) == max - 1)
	      switch (code)
		{
		case GT_EXPR:
		  code = EQ_EXPR;
		  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
		  t = build (code, type, TREE_OPERAND (t, 0), arg1);
		  break;
		case LE_EXPR:
		  code = NE_EXPR;
		  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
		  t = build (code, type, TREE_OPERAND (t, 0), arg1);
		  break;
		default:
		  break;
		}
	    /* ARG1 == minimum value of its type.  MIN is nonzero only
	       for signed types, whose minimum sign-extends into the
	       high word, hence the (min ? -1 : 0) check.  */
	    else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
		     && TREE_INT_CST_LOW (arg1) == min)
	      switch (code)
		{
		case LT_EXPR:
		  return omit_one_operand (type,
					   convert (type, integer_zero_node),
					   arg0);
		case LE_EXPR:
		  code = EQ_EXPR;
		  if (t == orig_t)
		    t = copy_node (t);
		  TREE_SET_CODE (t, EQ_EXPR);
		  break;

		case GE_EXPR:
		  return omit_one_operand (type,
					   convert (type, integer_one_node),
					   arg0);
		case GT_EXPR:
		  code = NE_EXPR;
		  if (t == orig_t)
		    t = copy_node (t);
		  TREE_SET_CODE (t, NE_EXPR);
		  break;

		default:
		  break;
		}
	    /* ARG1 == minimum value plus one.  */
	    else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
		     && TREE_INT_CST_LOW (arg1) == min + 1)
	      switch (code)
		{
		case GE_EXPR:
		  code = NE_EXPR;
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
		  t = build (code, type, TREE_OPERAND (t, 0), arg1);
		  break;
		case LT_EXPR:
		  code = EQ_EXPR;
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
		  t = build (code, type, TREE_OPERAND (t, 0), arg1);
		  break;
		default:
		  break;
		}

	    /* Unsigned X <= signed_max / X > signed_max is really a
	       sign test; do it as a signed comparison with zero.  */
	    else if (TREE_INT_CST_HIGH (arg1) == 0
		     && TREE_INT_CST_LOW (arg1) == signed_max
		     && TREE_UNSIGNED (TREE_TYPE (arg1))
		     /* signed_type does not work on pointer types.  */
		     && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
	      {
		/* The following case also applies to X < signed_max+1
		   and X >= signed_max+1 because previous transformations.  */
		if (code == LE_EXPR || code == GT_EXPR)
		  {
		    tree st0, st1;
		    st0 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg0));
		    st1 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg1));
		    return fold
		      (build (code == LE_EXPR ? GE_EXPR: LT_EXPR,
			      type, convert (st0, arg0),
			      convert (st1, integer_zero_node)));
		  }
	      }
	  }
      }
7247
      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
	 a MINUS_EXPR of a constant, we can convert it into a comparison with
	 a revised constant as long as no overflow occurs.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      arg1, TREE_OPERAND (arg0, 1), 0))
	  && ! TREE_CONSTANT_OVERFLOW (tem))
	return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));

      /* Similarly for a NEGATE_EXPR: -X == C becomes X == -C.  */
      else if ((code == EQ_EXPR || code == NE_EXPR)
	       && TREE_CODE (arg0) == NEGATE_EXPR
	       && TREE_CODE (arg1) == INTEGER_CST
	       && 0 != (tem = negate_expr (arg1))
	       && TREE_CODE (tem) == INTEGER_CST
	       && ! TREE_CONSTANT_OVERFLOW (tem))
	return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));

      /* If we have X - Y == 0, we can convert that to X == Y and similarly
	 for !=.  Don't do this for ordered comparisons due to overflow.  */
      else if ((code == NE_EXPR || code == EQ_EXPR)
	       && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
	return fold (build (code, type,
			    TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));

      /* If we are widening one operand of an integer comparison,
	 see if the other operand is similarly being widened.  Perhaps we
	 can do the comparison in the narrower type.  */
      else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
	       && TREE_CODE (arg0) == NOP_EXPR
	       && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
	       && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
	       && (TREE_TYPE (t1) == TREE_TYPE (tem)
		   || (TREE_CODE (t1) == INTEGER_CST
		       && int_fits_type_p (t1, TREE_TYPE (tem)))))
	return fold (build (code, type, tem, convert (TREE_TYPE (tem), t1)));

      /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
	 constant, we can simplify it.  */
      else if (TREE_CODE (arg1) == INTEGER_CST
	       && (TREE_CODE (arg0) == MIN_EXPR
		   || TREE_CODE (arg0) == MAX_EXPR)
	       && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return optimize_minmax_comparison (t);

      /* If we are comparing an ABS_EXPR with a constant, we can
	 convert all the cases into explicit comparisons, but they may
	 well not be faster than doing the ABS and one comparison.
	 But ABS (X) <= C is a range comparison, which becomes a subtraction
	 and a comparison, and is probably faster.  */
      else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	       && TREE_CODE (arg0) == ABS_EXPR
	       && ! TREE_SIDE_EFFECTS (arg0)
	       && (0 != (tem = negate_expr (arg1)))
	       && TREE_CODE (tem) == INTEGER_CST
	       && ! TREE_CONSTANT_OVERFLOW (tem))
	return fold (build (TRUTH_ANDIF_EXPR, type,
			    build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
			    build (LE_EXPR, type,
				   TREE_OPERAND (arg0, 0), arg1)));

      /* If this is an EQ or NE comparison with zero and ARG0 is
	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
	 two operations, but the latter can be done in one less insn
	 on machines that have only two-operand insns or on which a
	 constant cannot be the first operand.  */
      if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg0) == BIT_AND_EXPR)
	{
	  if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
	    return
	      fold (build (code, type,
			   build (BIT_AND_EXPR, TREE_TYPE (arg0),
				  build (RSHIFT_EXPR,
					 TREE_TYPE (TREE_OPERAND (arg0, 0)),
					 TREE_OPERAND (arg0, 1),
					 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
				  convert (TREE_TYPE (arg0),
					   integer_one_node)),
			   arg1));
	  /* Same transformation with the shift on the other side of
	     the AND.  */
	  else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
		   && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
	    return
	      fold (build (code, type,
			   build (BIT_AND_EXPR, TREE_TYPE (arg0),
				  build (RSHIFT_EXPR,
					 TREE_TYPE (TREE_OPERAND (arg0, 1)),
					 TREE_OPERAND (arg0, 0),
					 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
				  convert (TREE_TYPE (arg0),
					   integer_one_node)),
			   arg1));
	}
7347
      /* If this is an NE or EQ comparison of zero against the result of a
	 signed MOD operation whose second operand is a power of 2, make
	 the MOD operation unsigned since it is simpler and equivalent.
	 (Only zero-ness of the result is observed, so the sign of the
	 remainder does not matter.)  */
      if ((code == NE_EXPR || code == EQ_EXPR)
	  && integer_zerop (arg1)
	  && ! TREE_UNSIGNED (TREE_TYPE (arg0))
	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tree newtype = (*lang_hooks.types.unsigned_type) (TREE_TYPE (arg0));
	  tree newmod = build (TREE_CODE (arg0), newtype,
			       convert (newtype, TREE_OPERAND (arg0, 0)),
			       convert (newtype, TREE_OPERAND (arg0, 1)));

	  /* The rebuilt comparison is deliberately returned without
	     re-folding here.  */
	  return build (code, type, newmod, convert (newtype, arg1));
	}
7367
      /* If this is an NE comparison of zero with an AND of one, remove the
	 comparison since the AND will give the correct value.  */
      if (code == NE_EXPR && integer_zerop (arg1)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1)))
	return convert (type, arg0);

      /* If we have (A & C) == C where C is a power of 2, convert this into
	 (A & C) != 0.  Similarly for NE_EXPR.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold (build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
			    arg0, integer_zero_node));

      /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
	 2, then fold the expression into shifts and logical operations.  */
      tem = fold_single_bit_test (code, arg0, arg1, type);
      if (tem)
	return tem;

      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
	 Similarly for NE_EXPR: a bit outside the mask can never survive
	 the AND, so the comparison result is known.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree dandnotc = fold (build (BIT_ANDTC_EXPR, TREE_TYPE (arg0),
				       arg1, TREE_OPERAND (arg0, 1)));
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (!integer_zerop (dandnotc))
	    return omit_one_operand (type, rslt, arg0);
	}

      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
	 Similarly for NE_EXPR: a bit forced on by C but absent from D
	 makes equality impossible.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg0) == BIT_IOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree candnotd = fold (build (BIT_ANDTC_EXPR, TREE_TYPE (arg0),
				       TREE_OPERAND (arg0, 1), arg1));
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (!integer_zerop (candnotd))
	    return omit_one_operand (type, rslt, arg0);
	}

      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
	 and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TREE_UNSIGNED (TREE_TYPE (arg0))
	  && TREE_CODE (arg1) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (arg1, 0)))
	return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
		      build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
			     TREE_OPERAND (arg1, 1)),
		      convert (TREE_TYPE (arg0), integer_zero_node));

      /* Same, with the shift hidden under a widening conversion.  */
      else if ((code == LT_EXPR || code == GE_EXPR)
	       && TREE_UNSIGNED (TREE_TYPE (arg0))
	       && (TREE_CODE (arg1) == NOP_EXPR
		   || TREE_CODE (arg1) == CONVERT_EXPR)
	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
	       && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
	return
	  build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
		 convert (TREE_TYPE (arg0),
			  build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
				 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
		 convert (TREE_TYPE (arg0), integer_zero_node));
7441
      /* Simplify comparison of something with itself.  (For IEEE
	 floating-point, we can only do some of these simplifications,
	 because X == X is false when X is a NaN.)  */
      if (operand_equal_p (arg0, arg1, 0))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case GE_EXPR:
	    case LE_EXPR:
	      if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
		  || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
		return constant_boolean_node (1, type);
	      /* With NaNs possible, X >= X and X <= X still collapse to
		 X == X (all three differ only when X is a NaN).  */
	      code = EQ_EXPR;
	      if (t == orig_t)
		t = copy_node (t);
	      TREE_SET_CODE (t, code);
	      break;

	    case NE_EXPR:
	      /* For NE, we can only do this simplification if integer
		 or we don't honor IEEE floating point NaNs.  */
	      if (FLOAT_TYPE_P (TREE_TYPE (arg0))
		  && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
		break;
	      /* ... fall through ...  */
	    case GT_EXPR:
	    case LT_EXPR:
	      return constant_boolean_node (0, type);
	    default:
	      abort ();
	    }
	}
7474
      /* If we are comparing an expression that just has comparisons
	 of two integer values, arithmetic expressions of those comparisons,
	 and constants, we can simplify it.  There are only three cases
	 to check: the two values can either be equal, the first can be
	 greater, or the second can be greater.  Fold the expression for
	 those three values.  Since each value must be 0 or 1, we have
	 eight possibilities, each of which corresponds to the constant 0
	 or 1 or one of the six possible comparisons.

	 This handles common cases like (a > b) == 0 but also handles
	 expressions like ((x > y) - (y > x)) > 0, which supposedly
	 occur in macroized code.  */

      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
	{
	  tree cval1 = 0, cval2 = 0;
	  int save_p = 0;

	  if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
	      /* Don't handle degenerate cases here; they should already
		 have been handled anyway.  */
	      && cval1 != 0 && cval2 != 0
	      && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
	      && TREE_TYPE (cval1) == TREE_TYPE (cval2)
	      && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
	      && TYPE_MAX_VALUE (TREE_TYPE (cval1))
	      && TYPE_MAX_VALUE (TREE_TYPE (cval2))
	      /* MIN and MAX must differ so that substituting them below
		 really models the three orderings.  */
	      && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
				    TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
	    {
	      tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
	      tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

	      /* We can't just pass T to eval_subst in case cval1 or cval2
		 was the same as ARG1.  */

	      tree high_result
		= fold (build (code, type,
			       eval_subst (arg0, cval1, maxval, cval2, minval),
			       arg1));
	      tree equal_result
		= fold (build (code, type,
			       eval_subst (arg0, cval1, maxval, cval2, maxval),
			       arg1));
	      tree low_result
		= fold (build (code, type,
			       eval_subst (arg0, cval1, minval, cval2, maxval),
			       arg1));

	      /* All three of these results should be 0 or 1.  Confirm they
		 are.  Then use those values to select the proper code
		 to use.  */

	      if ((integer_zerop (high_result)
		   || integer_onep (high_result))
		  && (integer_zerop (equal_result)
		      || integer_onep (equal_result))
		  && (integer_zerop (low_result)
		      || integer_onep (low_result)))
		{
		  /* Make a 3-bit mask with the high-order bit being the
		     value for `>', the next for '=', and the low for '<'.  */
		  switch ((integer_onep (high_result) * 4)
			  + (integer_onep (equal_result) * 2)
			  + integer_onep (low_result))
		    {
		    case 0:
		      /* Always false.  */
		      return omit_one_operand (type, integer_zero_node, arg0);
		    case 1:
		      code = LT_EXPR;
		      break;
		    case 2:
		      code = EQ_EXPR;
		      break;
		    case 3:
		      code = LE_EXPR;
		      break;
		    case 4:
		      code = GT_EXPR;
		      break;
		    case 5:
		      code = NE_EXPR;
		      break;
		    case 6:
		      code = GE_EXPR;
		      break;
		    case 7:
		      /* Always true.  */
		      return omit_one_operand (type, integer_one_node, arg0);
		    }

		  t = build (code, type, cval1, cval2);
		  if (save_p)
		    return save_expr (t);
		  else
		    return fold (t);
		}
	    }
	}
7575
7576 /* If this is a comparison of a field, we may be able to simplify it. */
7577 if (((TREE_CODE (arg0) == COMPONENT_REF
7578 && (*lang_hooks.can_use_bit_fields_p) ())
7579 || TREE_CODE (arg0) == BIT_FIELD_REF)
7580 && (code == EQ_EXPR || code == NE_EXPR)
7581 /* Handle the constant case even without -O
7582 to make sure the warnings are given. */
7583 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
7584 {
7585 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
7586 return t1 ? t1 : t;
7587 }
7588
7589 /* If this is a comparison of complex values and either or both sides
7590 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
7591 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
7592 This may prevent needless evaluations. */
7593 if ((code == EQ_EXPR || code == NE_EXPR)
7594 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
7595 && (TREE_CODE (arg0) == COMPLEX_EXPR
7596 || TREE_CODE (arg1) == COMPLEX_EXPR
7597 || TREE_CODE (arg0) == COMPLEX_CST
7598 || TREE_CODE (arg1) == COMPLEX_CST))
7599 {
7600 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
7601 tree real0, imag0, real1, imag1;
7602
7603 arg0 = save_expr (arg0);
7604 arg1 = save_expr (arg1);
7605 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
7606 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
7607 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
7608 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
7609
7610 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
7611 : TRUTH_ORIF_EXPR),
7612 type,
7613 fold (build (code, type, real0, real1)),
7614 fold (build (code, type, imag0, imag1))));
7615 }
7616
7617 /* Optimize comparisons of strlen vs zero to a compare of the
7618 first character of the string vs zero. To wit,
7619 strlen(ptr) == 0 => *ptr == 0
7620 strlen(ptr) != 0 => *ptr != 0
7621 Other cases should reduce to one of these two (or a constant)
7622 due to the return value of strlen being unsigned. */
7623 if ((code == EQ_EXPR || code == NE_EXPR)
7624 && integer_zerop (arg1)
7625 && TREE_CODE (arg0) == CALL_EXPR
7626 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR)
7627 {
7628 tree fndecl = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7629 tree arglist;
7630
7631 if (TREE_CODE (fndecl) == FUNCTION_DECL
7632 && DECL_BUILT_IN (fndecl)
7633 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
7634 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
7635 && (arglist = TREE_OPERAND (arg0, 1))
7636 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
7637 && ! TREE_CHAIN (arglist))
7638 return fold (build (code, type,
7639 build1 (INDIRECT_REF, char_type_node,
7640 TREE_VALUE(arglist)),
7641 integer_zero_node));
7642 }
7643
7644 /* From here on, the only cases we handle are when the result is
7645 known to be a constant.
7646
7647 To compute GT, swap the arguments and do LT.
7648 To compute GE, do LT and invert the result.
7649 To compute LE, swap the arguments, do LT and invert the result.
7650 To compute NE, do EQ and invert the result.
7651
7652 Therefore, the code below must handle only EQ and LT. */
7653
7654 if (code == LE_EXPR || code == GT_EXPR)
7655 {
7656 tem = arg0, arg0 = arg1, arg1 = tem;
7657 code = swap_tree_comparison (code);
7658 }
7659
7660 /* Note that it is safe to invert for real values here because we
7661 will check below in the one case that it matters. */
7662
7663 t1 = NULL_TREE;
7664 invert = 0;
7665 if (code == NE_EXPR || code == GE_EXPR)
7666 {
7667 invert = 1;
7668 code = invert_tree_comparison (code);
7669 }
7670
7671 /* Compute a result for LT or EQ if args permit;
7672 otherwise return T. */
7673 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
7674 {
7675 if (code == EQ_EXPR)
7676 t1 = build_int_2 (tree_int_cst_equal (arg0, arg1), 0);
7677 else
7678 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
7679 ? INT_CST_LT_UNSIGNED (arg0, arg1)
7680 : INT_CST_LT (arg0, arg1)),
7681 0);
7682 }
7683
7684 #if 0 /* This is no longer useful, but breaks some real code. */
7685 /* Assume a nonexplicit constant cannot equal an explicit one,
7686 since such code would be undefined anyway.
7687 Exception: on sysvr4, using #pragma weak,
7688 a label can come out as 0. */
7689 else if (TREE_CODE (arg1) == INTEGER_CST
7690 && !integer_zerop (arg1)
7691 && TREE_CONSTANT (arg0)
7692 && TREE_CODE (arg0) == ADDR_EXPR
7693 && code == EQ_EXPR)
7694 t1 = build_int_2 (0, 0);
7695 #endif
7696 /* Two real constants can be compared explicitly. */
7697 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
7698 {
7699 /* If either operand is a NaN, the result is false with two
7700 exceptions: First, an NE_EXPR is true on NaNs, but that case
7701 is already handled correctly since we will be inverting the
7702 result for NE_EXPR. Second, if we had inverted a LE_EXPR
7703 or a GE_EXPR into a LT_EXPR, we must return true so that it
7704 will be inverted into false. */
7705
7706 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
7707 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
7708 t1 = build_int_2 (invert && code == LT_EXPR, 0);
7709
7710 else if (code == EQ_EXPR)
7711 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
7712 TREE_REAL_CST (arg1)),
7713 0);
7714 else
7715 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
7716 TREE_REAL_CST (arg1)),
7717 0);
7718 }
7719
7720 if (t1 == NULL_TREE)
7721 return t;
7722
7723 if (invert)
7724 TREE_INT_CST_LOW (t1) ^= 1;
7725
7726 TREE_TYPE (t1) = type;
7727 if (TREE_CODE (type) == BOOLEAN_TYPE)
7728 return (*lang_hooks.truthvalue_conversion) (t1);
7729 return t1;
7730
7731 case COND_EXPR:
7732 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
7733 so all simple results must be passed through pedantic_non_lvalue. */
7734 if (TREE_CODE (arg0) == INTEGER_CST)
7735 return pedantic_non_lvalue
7736 (TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
7737 else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
7738 return pedantic_omit_one_operand (type, arg1, arg0);
7739
7740 /* If the second operand is zero, invert the comparison and swap
7741 the second and third operands. Likewise if the second operand
7742 is constant and the third is not or if the third operand is
7743 equivalent to the first operand of the comparison. */
7744
7745 if (integer_zerop (arg1)
7746 || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
7747 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
7748 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
7749 TREE_OPERAND (t, 2),
7750 TREE_OPERAND (arg0, 1))))
7751 {
7752 /* See if this can be inverted. If it can't, possibly because
7753 it was a floating-point inequality comparison, don't do
7754 anything. */
7755 tem = invert_truthvalue (arg0);
7756
7757 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
7758 {
7759 t = build (code, type, tem,
7760 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
7761 arg0 = tem;
7762 /* arg1 should be the first argument of the new T. */
7763 arg1 = TREE_OPERAND (t, 1);
7764 STRIP_NOPS (arg1);
7765 }
7766 }
7767
7768 /* If we have A op B ? A : C, we may be able to convert this to a
7769 simpler expression, depending on the operation and the values
7770 of B and C. Signed zeros prevent all of these transformations,
7771 for reasons given above each one. */
7772
7773 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
7774 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
7775 arg1, TREE_OPERAND (arg0, 1))
7776 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
7777 {
7778 tree arg2 = TREE_OPERAND (t, 2);
7779 enum tree_code comp_code = TREE_CODE (arg0);
7780
7781 STRIP_NOPS (arg2);
7782
7783 /* If we have A op 0 ? A : -A, consider applying the following
7784 transformations:
7785
7786 A == 0? A : -A same as -A
7787 A != 0? A : -A same as A
7788 A >= 0? A : -A same as abs (A)
7789 A > 0? A : -A same as abs (A)
7790 A <= 0? A : -A same as -abs (A)
7791 A < 0? A : -A same as -abs (A)
7792
7793 None of these transformations work for modes with signed
7794 zeros. If A is +/-0, the first two transformations will
7795 change the sign of the result (from +0 to -0, or vice
7796 versa). The last four will fix the sign of the result,
7797 even though the original expressions could be positive or
7798 negative, depending on the sign of A.
7799
7800 Note that all these transformations are correct if A is
7801 NaN, since the two alternatives (A and -A) are also NaNs. */
7802 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
7803 ? real_zerop (TREE_OPERAND (arg0, 1))
7804 : integer_zerop (TREE_OPERAND (arg0, 1)))
7805 && TREE_CODE (arg2) == NEGATE_EXPR
7806 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
7807 switch (comp_code)
7808 {
7809 case EQ_EXPR:
7810 return
7811 pedantic_non_lvalue
7812 (convert (type,
7813 negate_expr
7814 (convert (TREE_TYPE (TREE_OPERAND (t, 1)),
7815 arg1))));
7816 case NE_EXPR:
7817 return pedantic_non_lvalue (convert (type, arg1));
7818 case GE_EXPR:
7819 case GT_EXPR:
7820 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7821 arg1 = convert ((*lang_hooks.types.signed_type)
7822 (TREE_TYPE (arg1)), arg1);
7823 return pedantic_non_lvalue
7824 (convert (type, fold (build1 (ABS_EXPR,
7825 TREE_TYPE (arg1), arg1))));
7826 case LE_EXPR:
7827 case LT_EXPR:
7828 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7829 arg1 = convert ((lang_hooks.types.signed_type)
7830 (TREE_TYPE (arg1)), arg1);
7831 return pedantic_non_lvalue
7832 (negate_expr (convert (type,
7833 fold (build1 (ABS_EXPR,
7834 TREE_TYPE (arg1),
7835 arg1)))));
7836 default:
7837 abort ();
7838 }
7839
7840 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
7841 A == 0 ? A : 0 is always 0 unless A is -0. Note that
7842 both transformations are correct when A is NaN: A != 0
7843 is then true, and A == 0 is false. */
7844
7845 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
7846 {
7847 if (comp_code == NE_EXPR)
7848 return pedantic_non_lvalue (convert (type, arg1));
7849 else if (comp_code == EQ_EXPR)
7850 return pedantic_non_lvalue (convert (type, integer_zero_node));
7851 }
7852
7853 /* Try some transformations of A op B ? A : B.
7854
7855 A == B? A : B same as B
7856 A != B? A : B same as A
7857 A >= B? A : B same as max (A, B)
7858 A > B? A : B same as max (B, A)
7859 A <= B? A : B same as min (A, B)
7860 A < B? A : B same as min (B, A)
7861
7862 As above, these transformations don't work in the presence
7863 of signed zeros. For example, if A and B are zeros of
7864 opposite sign, the first two transformations will change
7865 the sign of the result. In the last four, the original
7866 expressions give different results for (A=+0, B=-0) and
7867 (A=-0, B=+0), but the transformed expressions do not.
7868
7869 The first two transformations are correct if either A or B
7870 is a NaN. In the first transformation, the condition will
7871 be false, and B will indeed be chosen. In the case of the
7872 second transformation, the condition A != B will be true,
7873 and A will be chosen.
7874
7875 The conversions to max() and min() are not correct if B is
7876 a number and A is not. The conditions in the original
7877 expressions will be false, so all four give B. The min()
7878 and max() versions would give a NaN instead. */
7879 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
7880 arg2, TREE_OPERAND (arg0, 0)))
7881 {
7882 tree comp_op0 = TREE_OPERAND (arg0, 0);
7883 tree comp_op1 = TREE_OPERAND (arg0, 1);
7884 tree comp_type = TREE_TYPE (comp_op0);
7885
7886 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
7887 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
7888 {
7889 comp_type = type;
7890 comp_op0 = arg1;
7891 comp_op1 = arg2;
7892 }
7893
7894 switch (comp_code)
7895 {
7896 case EQ_EXPR:
7897 return pedantic_non_lvalue (convert (type, arg2));
7898 case NE_EXPR:
7899 return pedantic_non_lvalue (convert (type, arg1));
7900 case LE_EXPR:
7901 case LT_EXPR:
7902 /* In C++ a ?: expression can be an lvalue, so put the
7903 operand which will be used if they are equal first
7904 so that we can convert this back to the
7905 corresponding COND_EXPR. */
7906 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
7907 return pedantic_non_lvalue
7908 (convert (type, fold (build (MIN_EXPR, comp_type,
7909 (comp_code == LE_EXPR
7910 ? comp_op0 : comp_op1),
7911 (comp_code == LE_EXPR
7912 ? comp_op1 : comp_op0)))));
7913 break;
7914 case GE_EXPR:
7915 case GT_EXPR:
7916 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
7917 return pedantic_non_lvalue
7918 (convert (type, fold (build (MAX_EXPR, comp_type,
7919 (comp_code == GE_EXPR
7920 ? comp_op0 : comp_op1),
7921 (comp_code == GE_EXPR
7922 ? comp_op1 : comp_op0)))));
7923 break;
7924 default:
7925 abort ();
7926 }
7927 }
7928
7929 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
7930 we might still be able to simplify this. For example,
7931 if C1 is one less or one more than C2, this might have started
7932 out as a MIN or MAX and been transformed by this function.
7933 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
7934
7935 if (INTEGRAL_TYPE_P (type)
7936 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7937 && TREE_CODE (arg2) == INTEGER_CST)
7938 switch (comp_code)
7939 {
7940 case EQ_EXPR:
7941 /* We can replace A with C1 in this case. */
7942 arg1 = convert (type, TREE_OPERAND (arg0, 1));
7943 t = build (code, type, TREE_OPERAND (t, 0), arg1,
7944 TREE_OPERAND (t, 2));
7945 break;
7946
7947 case LT_EXPR:
7948 /* If C1 is C2 + 1, this is min(A, C2). */
7949 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
7950 && operand_equal_p (TREE_OPERAND (arg0, 1),
7951 const_binop (PLUS_EXPR, arg2,
7952 integer_one_node, 0), 1))
7953 return pedantic_non_lvalue
7954 (fold (build (MIN_EXPR, type, arg1, arg2)));
7955 break;
7956
7957 case LE_EXPR:
7958 /* If C1 is C2 - 1, this is min(A, C2). */
7959 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
7960 && operand_equal_p (TREE_OPERAND (arg0, 1),
7961 const_binop (MINUS_EXPR, arg2,
7962 integer_one_node, 0), 1))
7963 return pedantic_non_lvalue
7964 (fold (build (MIN_EXPR, type, arg1, arg2)));
7965 break;
7966
7967 case GT_EXPR:
7968 /* If C1 is C2 - 1, this is max(A, C2). */
7969 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
7970 && operand_equal_p (TREE_OPERAND (arg0, 1),
7971 const_binop (MINUS_EXPR, arg2,
7972 integer_one_node, 0), 1))
7973 return pedantic_non_lvalue
7974 (fold (build (MAX_EXPR, type, arg1, arg2)));
7975 break;
7976
7977 case GE_EXPR:
7978 /* If C1 is C2 + 1, this is max(A, C2). */
7979 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
7980 && operand_equal_p (TREE_OPERAND (arg0, 1),
7981 const_binop (PLUS_EXPR, arg2,
7982 integer_one_node, 0), 1))
7983 return pedantic_non_lvalue
7984 (fold (build (MAX_EXPR, type, arg1, arg2)));
7985 break;
7986 case NE_EXPR:
7987 break;
7988 default:
7989 abort ();
7990 }
7991 }
7992
7993 /* If the second operand is simpler than the third, swap them
7994 since that produces better jump optimization results. */
7995 if ((TREE_CONSTANT (arg1) || DECL_P (arg1)
7996 || TREE_CODE (arg1) == SAVE_EXPR)
7997 && ! (TREE_CONSTANT (TREE_OPERAND (t, 2))
7998 || DECL_P (TREE_OPERAND (t, 2))
7999 || TREE_CODE (TREE_OPERAND (t, 2)) == SAVE_EXPR))
8000 {
8001 /* See if this can be inverted. If it can't, possibly because
8002 it was a floating-point inequality comparison, don't do
8003 anything. */
8004 tem = invert_truthvalue (arg0);
8005
8006 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8007 {
8008 t = build (code, type, tem,
8009 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
8010 arg0 = tem;
8011 /* arg1 should be the first argument of the new T. */
8012 arg1 = TREE_OPERAND (t, 1);
8013 STRIP_NOPS (arg1);
8014 }
8015 }
8016
8017 /* Convert A ? 1 : 0 to simply A. */
8018 if (integer_onep (TREE_OPERAND (t, 1))
8019 && integer_zerop (TREE_OPERAND (t, 2))
8020 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
8021 call to fold will try to move the conversion inside
8022 a COND, which will recurse. In that case, the COND_EXPR
8023 is probably the best choice, so leave it alone. */
8024 && type == TREE_TYPE (arg0))
8025 return pedantic_non_lvalue (arg0);
8026
8027 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
8028 over COND_EXPR in cases such as floating point comparisons. */
8029 if (integer_zerop (TREE_OPERAND (t, 1))
8030 && integer_onep (TREE_OPERAND (t, 2))
8031 && truth_value_p (TREE_CODE (arg0)))
8032 return pedantic_non_lvalue (convert (type,
8033 invert_truthvalue (arg0)));
8034
8035 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
8036 operation is simply A & 2. */
8037
8038 if (integer_zerop (TREE_OPERAND (t, 2))
8039 && TREE_CODE (arg0) == NE_EXPR
8040 && integer_zerop (TREE_OPERAND (arg0, 1))
8041 && integer_pow2p (arg1)
8042 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
8043 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
8044 arg1, 1))
8045 return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));
8046
8047 /* Convert A ? B : 0 into A && B if A and B are truth values. */
8048 if (integer_zerop (TREE_OPERAND (t, 2))
8049 && truth_value_p (TREE_CODE (arg0))
8050 && truth_value_p (TREE_CODE (arg1)))
8051 return pedantic_non_lvalue (fold (build (TRUTH_ANDIF_EXPR, type,
8052 arg0, arg1)));
8053
8054 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
8055 if (integer_onep (TREE_OPERAND (t, 2))
8056 && truth_value_p (TREE_CODE (arg0))
8057 && truth_value_p (TREE_CODE (arg1)))
8058 {
8059 /* Only perform transformation if ARG0 is easily inverted. */
8060 tem = invert_truthvalue (arg0);
8061 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8062 return pedantic_non_lvalue (fold (build (TRUTH_ORIF_EXPR, type,
8063 tem, arg1)));
8064 }
8065
8066 return t;
8067
8068 case COMPOUND_EXPR:
8069 /* When pedantic, a compound expression can be neither an lvalue
8070 nor an integer constant expression. */
8071 if (TREE_SIDE_EFFECTS (arg0) || pedantic)
8072 return t;
8073 /* Don't let (0, 0) be null pointer constant. */
8074 if (integer_zerop (arg1))
8075 return build1 (NOP_EXPR, type, arg1);
8076 return convert (type, arg1);
8077
8078 case COMPLEX_EXPR:
8079 if (wins)
8080 return build_complex (type, arg0, arg1);
8081 return t;
8082
8083 case REALPART_EXPR:
8084 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8085 return t;
8086 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8087 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8088 TREE_OPERAND (arg0, 1));
8089 else if (TREE_CODE (arg0) == COMPLEX_CST)
8090 return TREE_REALPART (arg0);
8091 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8092 return fold (build (TREE_CODE (arg0), type,
8093 fold (build1 (REALPART_EXPR, type,
8094 TREE_OPERAND (arg0, 0))),
8095 fold (build1 (REALPART_EXPR,
8096 type, TREE_OPERAND (arg0, 1)))));
8097 return t;
8098
8099 case IMAGPART_EXPR:
8100 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8101 return convert (type, integer_zero_node);
8102 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8103 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8104 TREE_OPERAND (arg0, 0));
8105 else if (TREE_CODE (arg0) == COMPLEX_CST)
8106 return TREE_IMAGPART (arg0);
8107 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8108 return fold (build (TREE_CODE (arg0), type,
8109 fold (build1 (IMAGPART_EXPR, type,
8110 TREE_OPERAND (arg0, 0))),
8111 fold (build1 (IMAGPART_EXPR, type,
8112 TREE_OPERAND (arg0, 1)))));
8113 return t;
8114
8115 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
8116 appropriate. */
8117 case CLEANUP_POINT_EXPR:
8118 if (! has_cleanups (arg0))
8119 return TREE_OPERAND (t, 0);
8120
8121 {
8122 enum tree_code code0 = TREE_CODE (arg0);
8123 int kind0 = TREE_CODE_CLASS (code0);
8124 tree arg00 = TREE_OPERAND (arg0, 0);
8125 tree arg01;
8126
8127 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
8128 return fold (build1 (code0, type,
8129 fold (build1 (CLEANUP_POINT_EXPR,
8130 TREE_TYPE (arg00), arg00))));
8131
8132 if (kind0 == '<' || kind0 == '2'
8133 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
8134 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
8135 || code0 == TRUTH_XOR_EXPR)
8136 {
8137 arg01 = TREE_OPERAND (arg0, 1);
8138
8139 if (TREE_CONSTANT (arg00)
8140 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
8141 && ! has_cleanups (arg00)))
8142 return fold (build (code0, type, arg00,
8143 fold (build1 (CLEANUP_POINT_EXPR,
8144 TREE_TYPE (arg01), arg01))));
8145
8146 if (TREE_CONSTANT (arg01))
8147 return fold (build (code0, type,
8148 fold (build1 (CLEANUP_POINT_EXPR,
8149 TREE_TYPE (arg00), arg00)),
8150 arg01));
8151 }
8152
8153 return t;
8154 }
8155
8156 case CALL_EXPR:
8157 /* Check for a built-in function. */
8158 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
8159 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (expr, 0), 0))
8160 == FUNCTION_DECL)
8161 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
8162 {
8163 tree tmp = fold_builtin (expr);
8164 if (tmp)
8165 return tmp;
8166 }
8167 return t;
8168
8169 default:
8170 return t;
8171 } /* switch (code) */
8172 }
8173
8174 #ifdef ENABLE_FOLD_CHECKING
8175 #undef fold
8176
8177 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
8178 static void fold_check_failed (tree, tree);
8179 void print_fold_checksum (tree);
8180
8181 /* When --enable-checking=fold, compute a digest of expr before
8182 and after actual fold call to see if fold did not accidentally
8183 change original expr. */
8184
8185 tree
8186 fold (tree expr)
8187 {
8188 tree ret;
8189 struct md5_ctx ctx;
8190 unsigned char checksum_before[16], checksum_after[16];
8191 htab_t ht;
8192
8193 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8194 md5_init_ctx (&ctx);
8195 fold_checksum_tree (expr, &ctx, ht);
8196 md5_finish_ctx (&ctx, checksum_before);
8197 htab_empty (ht);
8198
8199 ret = fold_1 (expr);
8200
8201 md5_init_ctx (&ctx);
8202 fold_checksum_tree (expr, &ctx, ht);
8203 md5_finish_ctx (&ctx, checksum_after);
8204 htab_delete (ht);
8205
8206 if (memcmp (checksum_before, checksum_after, 16))
8207 fold_check_failed (expr, ret);
8208
8209 return ret;
8210 }
8211
8212 void
8213 print_fold_checksum (tree expr)
8214 {
8215 struct md5_ctx ctx;
8216 unsigned char checksum[16], cnt;
8217 htab_t ht;
8218
8219 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8220 md5_init_ctx (&ctx);
8221 fold_checksum_tree (expr, &ctx, ht);
8222 md5_finish_ctx (&ctx, checksum);
8223 htab_delete (ht);
8224 for (cnt = 0; cnt < 16; ++cnt)
8225 fprintf (stderr, "%02x", checksum[cnt]);
8226 putc ('\n', stderr);
8227 }
8228
/* Report that fold modified its argument EXPR in place while
   producing RET; this is always an internal compiler error.  */

static void
fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}
8234
/* Feed the contents of EXPR, and of every tree node reachable from
   it, into the md5 context CTX.  HT records the nodes already
   visited so that shared subtrees (and cycles) are hashed exactly
   once.  Fields that fold is legitimately allowed to change as a
   side effect -- SAVE_EXPR_NOPLACEHOLDER, DECL_ASSEMBLER_NAME,
   TYPE_POINTER_TO / TYPE_REFERENCE_TO -- are masked out by hashing
   a scratch copy of the node with those fields cleared.  */

static void
fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
{
  void **slot;
  enum tree_code code;
  char buf[sizeof (struct tree_decl)];
  int i, len;

  /* BUF must be big enough to hold a copy of any node we may need
     to scrub below; verify that at run time.  */
  if (sizeof (struct tree_exp) + 5 * sizeof (tree)
      > sizeof (struct tree_decl)
      || sizeof (struct tree_type) > sizeof (struct tree_decl))
    abort ();
  if (expr == NULL)
    return;
  /* Hash each distinct node only once.  */
  slot = htab_find_slot (ht, expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = expr;
  code = TREE_CODE (expr);
  if (code == SAVE_EXPR && SAVE_EXPR_NOPLACEHOLDER (expr))
    {
      /* Allow SAVE_EXPR_NOPLACEHOLDER flag to be modified.  */
      memcpy (buf, expr, tree_size (expr));
      expr = (tree) buf;
      SAVE_EXPR_NOPLACEHOLDER (expr) = 0;
    }
  else if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy (buf, expr, tree_size (expr));
      expr = (tree) buf;
      SET_DECL_ASSEMBLER_NAME (expr, NULL);
    }
  else if (TREE_CODE_CLASS (code) == 't'
	   && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
    {
      /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified.  */
      memcpy (buf, expr, tree_size (expr));
      expr = (tree) buf;
      TYPE_POINTER_TO (expr) = NULL;
      TYPE_REFERENCE_TO (expr) = NULL;
    }
  /* Hash the node's own bytes, then recurse into what it points at.  */
  md5_process_bytes (expr, tree_size (expr), ctx);
  fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  len = TREE_CODE_LENGTH (code);
  /* Recurse into the class-specific pointer fields of the node.  */
  switch (TREE_CODE_CLASS (code))
    {
    case 'c':
      switch (code)
	{
	case STRING_CST:
	  md5_process_bytes (TREE_STRING_POINTER (expr),
			     TREE_STRING_LENGTH (expr), ctx);
	  break;
	case COMPLEX_CST:
	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
	  break;
	case VECTOR_CST:
	  fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case 'x':
      switch (code)
	{
	case TREE_LIST:
	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
	  break;
	case TREE_VEC:
	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case 'e':
      /* A few expression codes have a different operand count than
	 TREE_CODE_LENGTH reports; correct LEN before falling
	 through to the generic operand walk.  */
      switch (code)
	{
	case SAVE_EXPR: len = 2; break;
	case GOTO_SUBROUTINE_EXPR: len = 0; break;
	case RTL_EXPR: len = 0; break;
	case WITH_CLEANUP_EXPR: len = 2; break;
	default: break;
	}
      /* FALLTHROUGH */
    case 'r':
    case '<':
    case '1':
    case '2':
    case 's':
      for (i = 0; i < len; ++i)
	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case 'd':
      fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
      fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
      fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
      fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
      fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
      fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
      break;
    case 't':
      fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
      fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}
8364
8365 #endif
8366
/* Perform constant folding and related simplification of initializer
   expression EXPR.  This behaves identically to "fold" but ignores
   potential run-time traps and exceptions that fold must preserve.  */
8370
8371 tree
8372 fold_initializer (tree expr)
8373 {
8374 int saved_signaling_nans = flag_signaling_nans;
8375 int saved_trapping_math = flag_trapping_math;
8376 int saved_trapv = flag_trapv;
8377 tree result;
8378
8379 flag_signaling_nans = 0;
8380 flag_trapping_math = 0;
8381 flag_trapv = 0;
8382
8383 result = fold (expr);
8384
8385 flag_signaling_nans = saved_signaling_nans;
8386 flag_trapping_math = saved_trapping_math;
8387 flag_trapv = saved_trapv;
8388
8389 return result;
8390 }
8391
8392 /* Determine if first argument is a multiple of second argument. Return 0 if
8393 it is not, or we cannot easily determined it to be.
8394
8395 An example of the sort of thing we care about (at this point; this routine
8396 could surely be made more general, and expanded to do what the *_DIV_EXPR's
8397 fold cases do now) is discovering that
8398
8399 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8400
8401 is a multiple of
8402
8403 SAVE_EXPR (J * 8)
8404
8405 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
8406
8407 This code also handles discovering that
8408
8409 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8410
8411 is a multiple of 8 so we don't have to worry about dealing with a
8412 possible remainder.
8413
8414 Note that we *look* inside a SAVE_EXPR only to determine how it was
8415 calculated; it is not safe for fold to do much of anything else with the
8416 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
8417 at run time. For example, the latter example above *cannot* be implemented
8418 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
8419 evaluation time of the original SAVE_EXPR is not necessarily the same at
8420 the time the new expression is evaluated. The only optimization of this
8421 sort that would be valid is changing
8422
8423 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
8424
8425 divided by 8 to
8426
8427 SAVE_EXPR (I) * SAVE_EXPR (J)
8428
8429 (where the same SAVE_EXPR (J) is used in the original and the
8430 transformed version). */
8431
8432 static int
8433 multiple_of_p (tree type, tree top, tree bottom)
8434 {
8435 if (operand_equal_p (top, bottom, 0))
8436 return 1;
8437
8438 if (TREE_CODE (type) != INTEGER_TYPE)
8439 return 0;
8440
8441 switch (TREE_CODE (top))
8442 {
8443 case MULT_EXPR:
8444 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8445 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8446
8447 case PLUS_EXPR:
8448 case MINUS_EXPR:
8449 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8450 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8451
8452 case LSHIFT_EXPR:
8453 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
8454 {
8455 tree op1, t1;
8456
8457 op1 = TREE_OPERAND (top, 1);
8458 /* const_binop may not detect overflow correctly,
8459 so check for it explicitly here. */
8460 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
8461 > TREE_INT_CST_LOW (op1)
8462 && TREE_INT_CST_HIGH (op1) == 0
8463 && 0 != (t1 = convert (type,
8464 const_binop (LSHIFT_EXPR, size_one_node,
8465 op1, 0)))
8466 && ! TREE_OVERFLOW (t1))
8467 return multiple_of_p (type, t1, bottom);
8468 }
8469 return 0;
8470
8471 case NOP_EXPR:
8472 /* Can't handle conversions from non-integral or wider integral type. */
8473 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
8474 || (TYPE_PRECISION (type)
8475 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
8476 return 0;
8477
8478 /* .. fall through ... */
8479
8480 case SAVE_EXPR:
8481 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
8482
8483 case INTEGER_CST:
8484 if (TREE_CODE (bottom) != INTEGER_CST
8485 || (TREE_UNSIGNED (type)
8486 && (tree_int_cst_sgn (top) < 0
8487 || tree_int_cst_sgn (bottom) < 0)))
8488 return 0;
8489 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
8490 top, bottom, 0));
8491
8492 default:
8493 return 0;
8494 }
8495 }
8496
/* Return true if `t' is known to be non-negative.  Returning false
   only means the sign could not be proved, not that T is negative.  */

int
tree_expr_nonnegative_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case ABS_EXPR:
    case FFS_EXPR:
    case POPCOUNT_EXPR:
    case PARITY_EXPR:
      /* These operations always yield a non-negative result.  */
      return 1;

    case CLZ_EXPR:
    case CTZ_EXPR:
      /* These are undefined at zero.  This is true even if
	 C[LT]Z_DEFINED_VALUE_AT_ZERO is set, since what we're
	 computing here is a user-visible property.  */
      return 0;

    case INTEGER_CST:
      /* A constant's sign is directly visible.  */
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      /* Likewise for real constants (note this treats -0.0 as
	 negative, since REAL_VALUE_NEGATIVE tests the sign bit).  */
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case PLUS_EXPR:
      /* For floating point, the sum of two non-negative values is
	 non-negative.  */
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
	return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	  && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
	 both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
	    {
	      /* +1: the sum needs one more bit than the wider operand.  */
	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
				       TYPE_PRECISION (inner2)) + 1;
	      return prec < TYPE_PRECISION (TREE_TYPE (t));
	    }
	}
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
	{
	  /* x * x for floating point x is always non-negative.  */
	  if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
	    return 1;
	  /* Otherwise, a product of non-negatives is non-negative.  */
	  return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	    && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
	}

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
	    return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
	      < TYPE_PRECISION (TREE_TYPE (t));
	}
      return 0;

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      /* A quotient of non-negatives is non-negative.  */
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	&& tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      /* The sign of a remainder follows the dividend here; only the
	 first operand is examined.  */
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case RDIV_EXPR:
      /* Real division of non-negatives is non-negative.  */
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	&& tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Conversions: decide based on the source and destination types.  */
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
	tree outer_type = TREE_TYPE (t);

	if (TREE_CODE (outer_type) == REAL_TYPE)
	  {
	    /* real -> real and int -> real preserve sign; an unsigned
	       source is trivially non-negative.  */
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      {
		if (TREE_UNSIGNED (inner_type))
		  return 1;
		return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	      }
	  }
	else if (TREE_CODE (outer_type) == INTEGER_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
	    /* A widening conversion from an unsigned type is a
	       zero-extension, hence non-negative.  */
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
		&& TREE_UNSIGNED (inner_type);
	  }
      }
      break;

    case COND_EXPR:
      /* Both arms of the conditional must be non-negative.  */
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
	&& tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
    case COMPOUND_EXPR:
      /* The value of a comma expression is its second operand.  */
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MIN_EXPR:
      /* The minimum is non-negative only if both operands are.  */
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	&& tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MAX_EXPR:
      /* The maximum is non-negative if either operand is.  */
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	|| tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MODIFY_EXPR:
      /* The value of an assignment is the stored value.  */
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case BIND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case SAVE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case NON_LVALUE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case FLOAT_EXPR:
      /* int -> float conversion preserves the sign.  */
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case RTL_EXPR:
      /* Defer to the RTL-level check for embedded RTL.  */
      return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));

    case CALL_EXPR:
      /* Direct calls to math builtins whose results are known
	 non-negative, or whose sign follows their argument.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
	{
	  tree fndecl = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
	  tree arglist = TREE_OPERAND (t, 1);
	  if (TREE_CODE (fndecl) == FUNCTION_DECL
	      && DECL_BUILT_IN (fndecl)
	      && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
	    switch (DECL_FUNCTION_CODE (fndecl))
	      {
	      /* These builtins never return a negative value.  */
	      case BUILT_IN_CABS:
	      case BUILT_IN_CABSL:
	      case BUILT_IN_CABSF:
	      case BUILT_IN_EXP:
	      case BUILT_IN_EXPF:
	      case BUILT_IN_EXPL:
	      case BUILT_IN_FABS:
	      case BUILT_IN_FABSF:
	      case BUILT_IN_FABSL:
	      case BUILT_IN_SQRT:
	      case BUILT_IN_SQRTF:
	      case BUILT_IN_SQRTL:
		return 1;

	      /* These preserve the sign of their (first) argument.  */
	      case BUILT_IN_ATAN:
	      case BUILT_IN_ATANF:
	      case BUILT_IN_ATANL:
	      case BUILT_IN_CEIL:
	      case BUILT_IN_CEILF:
	      case BUILT_IN_CEILL:
	      case BUILT_IN_FLOOR:
	      case BUILT_IN_FLOORF:
	      case BUILT_IN_FLOORL:
	      case BUILT_IN_NEARBYINT:
	      case BUILT_IN_NEARBYINTF:
	      case BUILT_IN_NEARBYINTL:
	      case BUILT_IN_ROUND:
	      case BUILT_IN_ROUNDF:
	      case BUILT_IN_ROUNDL:
	      case BUILT_IN_TRUNC:
	      case BUILT_IN_TRUNCF:
	      case BUILT_IN_TRUNCL:
		return tree_expr_nonnegative_p (TREE_VALUE (arglist));

	      /* pow(x,y) is non-negative when the base x is.  */
	      case BUILT_IN_POW:
	      case BUILT_IN_POWF:
	      case BUILT_IN_POWL:
		return tree_expr_nonnegative_p (TREE_VALUE (arglist));

	      default:
		break;
	      }
	}

      /* ... fall through ...  */

    default:
      if (truth_value_p (TREE_CODE (t)))
	/* Truth values evaluate to 0 or 1, which is nonnegative.  */
	return 1;
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return 0;
}
8704
8705 /* Return true if `r' is known to be non-negative.
8706 Only handles constants at the moment. */
8707
8708 int
8709 rtl_expr_nonnegative_p (rtx r)
8710 {
8711 switch (GET_CODE (r))
8712 {
8713 case CONST_INT:
8714 return INTVAL (r) >= 0;
8715
8716 case CONST_DOUBLE:
8717 if (GET_MODE (r) == VOIDmode)
8718 return CONST_DOUBLE_HIGH (r) >= 0;
8719 return 0;
8720
8721 case CONST_VECTOR:
8722 {
8723 int units, i;
8724 rtx elt;
8725
8726 units = CONST_VECTOR_NUNITS (r);
8727
8728 for (i = 0; i < units; ++i)
8729 {
8730 elt = CONST_VECTOR_ELT (r, i);
8731 if (!rtl_expr_nonnegative_p (elt))
8732 return 0;
8733 }
8734
8735 return 1;
8736 }
8737
8738 case SYMBOL_REF:
8739 case LABEL_REF:
8740 /* These are always nonnegative. */
8741 return 1;
8742
8743 default:
8744 return 0;
8745 }
8746 }
8747
8748 #include "gt-fold-const.h"