/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */


/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant and prior overflow indicator, and
   forces the value to fit the type.  It returns an overflow indicator.  */

#include "config.h"
#include "system.h"
#include <setjmp.h>
#include "flags.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"

static void encode              PARAMS ((HOST_WIDE_INT *,
                                         unsigned HOST_WIDE_INT,
                                         HOST_WIDE_INT));
static void decode              PARAMS ((HOST_WIDE_INT *,
                                         unsigned HOST_WIDE_INT *,
                                         HOST_WIDE_INT *));
static tree negate_expr         PARAMS ((tree));
static tree split_tree          PARAMS ((tree, enum tree_code, tree *, tree *,
                                         int));
static tree associate_trees     PARAMS ((tree, tree, enum tree_code, tree));
static tree int_const_binop     PARAMS ((enum tree_code, tree, tree, int, int));
static void const_binop_1       PARAMS ((PTR));
static tree const_binop         PARAMS ((enum tree_code, tree, tree, int));
static void fold_convert_1      PARAMS ((PTR));
static tree fold_convert        PARAMS ((tree, tree));
static enum tree_code invert_tree_comparison PARAMS ((enum tree_code));
static enum tree_code swap_tree_comparison PARAMS ((enum tree_code));
static int truth_value_p        PARAMS ((enum tree_code));
static int operand_equal_for_comparison_p PARAMS ((tree, tree, tree));
static int twoval_comparison_p  PARAMS ((tree, tree *, tree *, int *));
static tree eval_subst          PARAMS ((tree, tree, tree, tree, tree));
static tree omit_one_operand    PARAMS ((tree, tree, tree));
static tree pedantic_omit_one_operand PARAMS ((tree, tree, tree));
static tree distribute_bit_expr PARAMS ((enum tree_code, tree, tree, tree));
static tree make_bit_field_ref  PARAMS ((tree, tree, int, int, int));
static tree optimize_bit_field_compare PARAMS ((enum tree_code, tree,
                                                tree, tree));
static tree decode_field_reference PARAMS ((tree, HOST_WIDE_INT *,
                                            HOST_WIDE_INT *,
                                            enum machine_mode *, int *,
                                            int *, tree *, tree *));
static int all_ones_mask_p      PARAMS ((tree, int));
static int simple_operand_p     PARAMS ((tree));
static tree range_binop         PARAMS ((enum tree_code, tree, tree, int,
                                         tree, int));
static tree make_range          PARAMS ((tree, int *, tree *, tree *));
static tree build_range_check   PARAMS ((tree, tree, int, tree, tree));
static int merge_ranges         PARAMS ((int *, tree *, tree *, int, tree, tree,
                                         int, tree, tree));
static tree fold_range_test     PARAMS ((tree));
static tree unextend            PARAMS ((tree, int, int, tree));
static tree fold_truthop        PARAMS ((enum tree_code, tree, tree, tree));
static tree optimize_minmax_comparison PARAMS ((tree));
static tree extract_muldiv     PARAMS ((tree, tree, enum tree_code, tree));
static tree strip_compound_expr PARAMS ((tree, tree));
static int multiple_of_p        PARAMS ((tree, tree, tree));
static tree constant_boolean_node PARAMS ((int, tree));
static int count_cond           PARAMS ((tree, int));

#ifndef BRANCH_COST
#define BRANCH_COST 1
#endif

#if defined(HOST_EBCDIC)
/* bit 8 is significant in EBCDIC */
#define CHARMASK 0xff
#else
#define CHARMASK 0x7f
#endif


/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
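
/* Worked example (illustrative, not from the original source; assume a
   32-bit HOST_WIDE_INT): a = 0x7fffffff and b = 1 wrap to
   sum = 0x80000000.  Then ~(a ^ b) = 0x80000001 has the sign bit set
   (the addends agree in sign) and a ^ sum = 0xffffffff has it set too
   (the sum's sign flipped), so the macro yields nonzero.  For
   a = b = 1, sum = 2, the whole expression is 3, which is not
   negative, so no overflow is reported.  */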
\f
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)

/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (words, low, hi)
     HOST_WIDE_INT *words;
     unsigned HOST_WIDE_INT low;
     HOST_WIDE_INT hi;
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (words, low, hi)
     HOST_WIDE_INT *words;
     unsigned HOST_WIDE_INT *low;
     HOST_WIDE_INT *hi;
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
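
/* Illustrative sketch (not part of the build; assumes a host with
   HOST_BITS_PER_WIDE_INT == 32, so BASE is 0x10000 and each array
   element holds one 16-bit halfword): the encode/decode round-trip.  */
#if 0
{
  HOST_WIDE_INT words[4], hi;
  unsigned HOST_WIDE_INT lo;

  encode (words, 0x12345678, 0x0abc);
  /* words is now { 0x5678, 0x1234, 0x0abc, 0x0000 }.  */
  decode (words, &lo, &hi);
  /* lo == 0x12345678 and hi == 0x0abc again.  */
}
#endif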
\f
/* Make the integer constant T valid for its type by setting to 0 or 1 all
   the bits in the constant that don't belong in the type.

   Return 1 if a signed overflow occurs, 0 otherwise.  If OVERFLOW is
   nonzero, a signed overflow has already occurred in calculating T, so
   propagate it.

   Make the real constant T valid for its type by calling CHECK_FLOAT_VALUE,
   if it exists.  */

int
force_fit_type (t, overflow)
     tree t;
     int overflow;
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;

  if (TREE_CODE (t) == REAL_CST)
    {
#ifdef CHECK_FLOAT_VALUE
      CHECK_FLOAT_VALUE (TYPE_MODE (TREE_TYPE (t)), TREE_REAL_CST (t),
                         overflow);
#endif
      return overflow;
    }

  else if (TREE_CODE (t) != INTEGER_CST)
    return overflow;

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t)))
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec == 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    TREE_INT_CST_HIGH (t)
      &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      TREE_INT_CST_HIGH (t) = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
    }

  /* Unsigned types do not suffer sign extension or overflow.  */
  if (TREE_UNSIGNED (TREE_TYPE (t)))
    return overflow;

  /* If the value's sign bit is set, extend the sign.  */
  if (prec != 2 * HOST_BITS_PER_WIDE_INT
      && (prec > HOST_BITS_PER_WIDE_INT
          ? 0 != (TREE_INT_CST_HIGH (t)
                  & ((HOST_WIDE_INT) 1
                     << (prec - HOST_BITS_PER_WIDE_INT - 1)))
          : 0 != (TREE_INT_CST_LOW (t)
                  & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
    {
      /* Value is negative:
         set to 1 all the bits that are outside this type's precision.  */
      if (prec > HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_HIGH (t)
          |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      else
        {
          TREE_INT_CST_HIGH (t) = -1;
          if (prec < HOST_BITS_PER_WIDE_INT)
            TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
        }
    }

  /* Return nonzero if signed overflow occurred.  */
  return
    ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
     != 0);
}
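
/* Worked example (illustrative): fitting the constant 0xff into a
   signed 8-bit type.  prec == 8, so the high word and the low-word
   bits above bit 7 are cleared first; bit 7 is set, so the value is
   then sign-extended to high == -1 and low == all ones, i.e. -1.
   Because the stored bits changed, the function reports a signed
   overflow, since 255 is not representable in the type.  */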
\f
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (l1, h1, l2, h2, lv, hv)
     unsigned HOST_WIDE_INT l1, l2;
     HOST_WIDE_INT h1, h2;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
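
/* Note (illustrative): the carry out of the low word is recovered as
   `l < l1', which works because unsigned addition wraps; e.g. with
   32-bit words, l1 = 0xffffffff and l2 = 2 give l = 1, and 1 < l1, so
   the carry propagates into the high word.  */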

/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (l1, h1, lv, hv)
     unsigned HOST_WIDE_INT l1;
     HOST_WIDE_INT h1;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = - l1;
      *hv = ~ h1;
      return 0;
    }
}
\f
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (l1, h1, l2, h2, lv, hv)
     unsigned HOST_WIDE_INT l1, l2;
     HOST_WIDE_INT h1, h2;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  register unsigned HOST_WIDE_INT carry;
  register int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  bzero ((char *) prod, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);        /* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
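
/* Worked example (illustrative, 16-bit halfwords): for the single-
   halfword operands 0xffff * 0xffff the inner loop computes
   carry = 0xffff * 0xffff = 0xfffe0001, stores prod[0] = 0x0001 and
   carries 0xfffe into prod[1], yielding the doubleword product
   0xfffe0001, within the bounds promised by the comments above.  */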
\f
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (l1, h1, count, prec, lv, hv, arith)
     unsigned HOST_WIDE_INT l1;
     HOST_WIDE_INT h1, count;
     unsigned int prec;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
     int arith;
{
  if (count < 0)
    {
      rshift_double (l1, h1, - count, prec, lv, hv, arith);
      return;
    }

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }
}

/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (l1, h1, count, prec, lv, hv, arith)
     unsigned HOST_WIDE_INT l1;
     HOST_WIDE_INT h1, count;
     unsigned int prec ATTRIBUTE_UNUSED;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
     int arith;
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = signmask;
      *lv = signmask;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = signmask;
      *lv = ((signmask << (2 * HOST_BITS_PER_WIDE_INT - count - 1) << 1)
             | ((unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT)));
    }
  else
    {
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
      *hv = ((signmask << (HOST_BITS_PER_WIDE_INT - count))
             | ((unsigned HOST_WIDE_INT) h1 >> count));
    }
}
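
/* Note (illustrative): the `x << (WIDTH - count - 1) << 1' idiom used
   above and in lshift_double exists because a single shift by
   WIDTH - count would shift by the full word width when count == 0,
   which C leaves undefined; splitting it into two shifts keeps every
   shift count in [0, WIDTH - 1].  */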
\f
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (l1, h1, count, prec, lv, hv)
     unsigned HOST_WIDE_INT l1;
     HOST_WIDE_INT h1, count;
     unsigned int prec;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (l1, h1, count, prec, lv, hv)
     unsigned HOST_WIDE_INT l1;
     HOST_WIDE_INT h1, count;
     unsigned int prec;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
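
/* Note (illustrative): both rotates use the standard identity
   rotl (x, n) == (x << n) | (x >> (prec - n)) applied to the
   doubleword, with the count reduced modulo PREC first so that both
   component shifts stay in range.  */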
\f
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (code, uns,
                      lnum_orig, hnum_orig, lden_orig, hden_orig,
                      lquo, hquo, lrem, hrem)
     enum tree_code code;
     int uns;
     unsigned HOST_WIDE_INT lnum_orig;  /* num == numerator == dividend */
     HOST_WIDE_INT hnum_orig;
     unsigned HOST_WIDE_INT lden_orig;  /* den == denominator == divisor */
     HOST_WIDE_INT hden_orig;
     unsigned HOST_WIDE_INT *lquo, *lrem;
     HOST_WIDE_INT *hquo, *hrem;
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];     /* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  register int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {                           /* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {                           /* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  bzero ((char *) quo, sizeof quo);

  bzero ((char *) num, sizeof num);     /* to zero the extra scaling element */
  bzero ((char *) den, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest non-zero divisor digit.  */
      for (i = 4 - 1; ; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {                       /* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0)
                den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < carry)
            {
              quo_est--;
              carry = 0;        /* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num[num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:        /* round toward zero */
    case EXACT_DIV_EXPR:        /* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:        /* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:         /* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:        /* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      abort ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
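
/* Worked example (illustrative): dividing 7 by -3 under the different
   rounding codes.  The truncating quotient is -2 with remainder 1:
     TRUNC_DIV_EXPR:  quo -2, rem  1  (round toward zero)
     FLOOR_DIV_EXPR:  quo -3, rem -2  (negative quotient, nonzero rem)
     CEIL_DIV_EXPR:   quo -2, rem  1  (quotient not positive: unchanged)
     ROUND_DIV_EXPR:  quo -2, rem  1  (2 * |rem| is not > |den|)
   In each case the final remainder is recomputed so that
   num == quo * den + rem still holds.  */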
\f
#ifndef REAL_ARITHMETIC
/* Effectively truncate a real value to represent the nearest possible value
   in a narrower mode.  The result is actually represented in the same data
   type as the argument, but its value is usually different.

   A trap may occur during the FP operations and it is the responsibility
   of the calling function to have a handler established.  */

REAL_VALUE_TYPE
real_value_truncate (mode, arg)
     enum machine_mode mode;
     REAL_VALUE_TYPE arg;
{
  return REAL_VALUE_TRUNCATE (mode, arg);
}

#if TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT

/* Check for infinity in an IEEE double precision number.  */

int
target_isinf (x)
     REAL_VALUE_TYPE x;
{
  /* The IEEE 64-bit double format.  */
  union {
    REAL_VALUE_TYPE d;
    struct {
      unsigned sign : 1;
      unsigned exponent : 11;
      unsigned mantissa1 : 20;
      unsigned mantissa2;
    } little_endian;
    struct {
      unsigned mantissa2;
      unsigned mantissa1 : 20;
      unsigned exponent : 11;
      unsigned sign : 1;
    } big_endian;
  } u;

  u.d = dconstm1;
  if (u.big_endian.sign == 1)
    {
      u.d = x;
      return (u.big_endian.exponent == 2047
              && u.big_endian.mantissa1 == 0
              && u.big_endian.mantissa2 == 0);
    }
  else
    {
      u.d = x;
      return (u.little_endian.exponent == 2047
              && u.little_endian.mantissa1 == 0
              && u.little_endian.mantissa2 == 0);
    }
}

/* Check whether an IEEE double precision number is a NaN.  */

int
target_isnan (x)
     REAL_VALUE_TYPE x;
{
  /* The IEEE 64-bit double format.  */
  union {
    REAL_VALUE_TYPE d;
    struct {
      unsigned sign : 1;
      unsigned exponent : 11;
      unsigned mantissa1 : 20;
      unsigned mantissa2;
    } little_endian;
    struct {
      unsigned mantissa2;
      unsigned mantissa1 : 20;
      unsigned exponent : 11;
      unsigned sign : 1;
    } big_endian;
  } u;

  u.d = dconstm1;
  if (u.big_endian.sign == 1)
    {
      u.d = x;
      return (u.big_endian.exponent == 2047
              && (u.big_endian.mantissa1 != 0
                  || u.big_endian.mantissa2 != 0));
    }
  else
    {
      u.d = x;
      return (u.little_endian.exponent == 2047
              && (u.little_endian.mantissa1 != 0
                  || u.little_endian.mantissa2 != 0));
    }
}

/* Check for a negative IEEE double precision number.  */

int
target_negative (x)
     REAL_VALUE_TYPE x;
{
  /* The IEEE 64-bit double format.  */
  union {
    REAL_VALUE_TYPE d;
    struct {
      unsigned sign : 1;
      unsigned exponent : 11;
      unsigned mantissa1 : 20;
      unsigned mantissa2;
    } little_endian;
    struct {
      unsigned mantissa2;
      unsigned mantissa1 : 20;
      unsigned exponent : 11;
      unsigned sign : 1;
    } big_endian;
  } u;

  u.d = dconstm1;
  if (u.big_endian.sign == 1)
    {
      u.d = x;
      return u.big_endian.sign;
    }
  else
    {
      u.d = x;
      return u.little_endian.sign;
    }
}
#else /* Target not IEEE */

/* Let's assume other float formats don't have infinity.
   (This can be overridden by redefining REAL_VALUE_ISINF.)  */

int
target_isinf (x)
     REAL_VALUE_TYPE x ATTRIBUTE_UNUSED;
{
  return 0;
}

/* Let's assume other float formats don't have NaNs.
   (This can be overridden by redefining REAL_VALUE_ISNAN.)  */

int
target_isnan (x)
     REAL_VALUE_TYPE x ATTRIBUTE_UNUSED;
{
  return 0;
}

/* Let's assume other float formats don't have minus zero.
   (This can be overridden by redefining REAL_VALUE_NEGATIVE.)  */

int
target_negative (x)
     REAL_VALUE_TYPE x;
{
  return x < 0;
}
#endif /* Target not IEEE */

/* Try to change R into its exact multiplicative inverse in machine mode
   MODE.  Return nonzero function value if successful.  */

int
exact_real_inverse (mode, r)
     enum machine_mode mode;
     REAL_VALUE_TYPE *r;
{
  jmp_buf float_error;
  union
    {
      double d;
      unsigned short i[4];
    } x, t, y;
#ifdef CHECK_FLOAT_VALUE
  int i;
#endif

  /* Usually disable if bounds checks are not reliable.  */
  if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT) && !flag_pretend_float)
    return 0;

  /* Set array index to the less significant bits in the unions, depending
     on the endian-ness of the host doubles.
     Disable if insufficient information on the data structure.  */
#if HOST_FLOAT_FORMAT == UNKNOWN_FLOAT_FORMAT
  return 0;
#else
#if HOST_FLOAT_FORMAT == VAX_FLOAT_FORMAT
#define K 2
#else
#if HOST_FLOAT_FORMAT == IBM_FLOAT_FORMAT
#define K 2
#else
#define K (2 * HOST_FLOAT_WORDS_BIG_ENDIAN)
#endif
#endif
#endif

  if (setjmp (float_error))
    {
      /* Don't do the optimization if there was an arithmetic error.  */
    fail:
      set_float_handler (NULL_PTR);
      return 0;
    }
  set_float_handler (float_error);

  /* Domain check the argument.  */
  x.d = *r;
  if (x.d == 0.0)
    goto fail;

#ifdef REAL_INFINITY
  if (REAL_VALUE_ISINF (x.d) || REAL_VALUE_ISNAN (x.d))
    goto fail;
#endif

  /* Compute the reciprocal and check for numerical exactness.
     It is unnecessary to check all the significand bits to determine
     whether X is a power of 2.  If X is not, then it is impossible for
     the bottom half significand of both X and 1/X to be all zero bits.
     Hence we ignore the data structure of the top half and examine only
     the low order bits of the two significands.  */
  t.d = 1.0 / x.d;
  if (x.i[K] != 0 || x.i[K + 1] != 0 || t.i[K] != 0 || t.i[K + 1] != 0)
    goto fail;

  /* Truncate to the required mode and range-check the result.  */
  y.d = REAL_VALUE_TRUNCATE (mode, t.d);
#ifdef CHECK_FLOAT_VALUE
  i = 0;
  if (CHECK_FLOAT_VALUE (mode, y.d, i))
    goto fail;
#endif

  /* Fail if truncation changed the value.  */
  if (y.d != t.d || y.d == 0.0)
    goto fail;

#ifdef REAL_INFINITY
  if (REAL_VALUE_ISINF (y.d) || REAL_VALUE_ISNAN (y.d))
    goto fail;
#endif

  /* Output the reciprocal and return success flag.  */
  set_float_handler (NULL_PTR);
  *r = y.d;
  return 1;
}
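
/* Example (illustrative): for R == 4.0 the reciprocal 0.25 is exact
   (both are powers of 2, so the low halves of both significands are
   zero) and *R is replaced by 0.25 with a return of 1.  For R == 3.0
   the reciprocal 1/3 has nonzero low-order significand bits, so the
   function returns 0 and leaves *R untouched.  */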

/* Convert C9X hexadecimal floating point string constant S.  Return
   real value type in mode MODE.  This function uses the host computer's
   floating point arithmetic when there is no REAL_ARITHMETIC.  */

REAL_VALUE_TYPE
real_hex_to_f (s, mode)
     char *s;
     enum machine_mode mode;
{
  REAL_VALUE_TYPE ip;
  char *p = s;
  unsigned HOST_WIDE_INT low, high;
  int shcount, nrmcount, k;
  int sign, expsign, isfloat;
  int lost = 0;      /* Nonzero low order bits shifted out and discarded.  */
  int frexpon = 0;   /* Bits after the decimal point.  */
  int expon = 0;     /* Value of exponent.  */
  int decpt = 0;     /* How many decimal points.  */
  int gotp = 0;      /* How many P's.  */
  char c;

  isfloat = 0;
  expsign = 1;
  ip = 0.0;

  while (*p == ' ' || *p == '\t')
    ++p;

  /* Sign, if any, comes first.  */
  sign = 1;
  if (*p == '-')
    {
      sign = -1;
      ++p;
    }

  /* The string is supposed to start with 0x or 0X .  */
  if (*p == '0')
    {
      ++p;
      if (*p == 'x' || *p == 'X')
        ++p;
      else
        abort ();
    }
  else
    abort ();

  while (*p == '0')
    ++p;

  high = 0;
  low = 0;
  shcount = 0;
  while ((c = *p) != '\0')
    {
      if ((c >= '0' && c <= '9') || (c >= 'A' && c <= 'F')
          || (c >= 'a' && c <= 'f'))
        {
          k = c & CHARMASK;
          if (k >= 'a' && k <= 'f')
            k = k - 'a' + 10;
          else if (k >= 'A')
            k = k - 'A' + 10;
          else
            k = k - '0';

          if ((high & 0xf0000000) == 0)
            {
              high = (high << 4) + ((low >> 28) & 15);
              low = (low << 4) + k;
              shcount += 4;
              if (decpt)
                frexpon += 4;
            }
          else
            {
              /* Record nonzero lost bits.  */
              lost |= k;
              if (! decpt)
                frexpon -= 4;
            }
          ++p;
        }
      else if (c == '.')
        {
          ++decpt;
          ++p;
        }

      else if (c == 'p' || c == 'P')
        {
          ++gotp;
          ++p;
          /* Sign of exponent.  */
          if (*p == '-')
            {
              expsign = -1;
              ++p;
            }

          /* Value of exponent.
             The exponent field is a decimal integer.  */
          while (ISDIGIT (*p))
            {
              k = (*p++ & CHARMASK) - '0';
              expon = 10 * expon + k;
            }

          expon *= expsign;
          /* F suffix is ambiguous in the significand part
             so it must appear after the decimal exponent field.  */
          if (*p == 'f' || *p == 'F')
            {
              isfloat = 1;
              ++p;
              break;
            }
        }

      else if (c == 'l' || c == 'L')
        {
          ++p;
          break;
        }
      else
        break;
    }

  /* Abort if last character read was not legitimate.  */
  c = *p;
  if ((c != '\0' && c != ' ' && c != '\n' && c != '\r') || (decpt > 1))
    abort ();

  /* There must be either one decimal point or one p.  */
  if (decpt == 0 && gotp == 0)
    abort ();

  shcount -= 4;
  if (high == 0 && low == 0)
    return dconst0;

  /* Normalize.  */
  nrmcount = 0;
  if (high == 0)
    {
      high = low;
      low = 0;
      nrmcount += 32;
    }

  /* Leave a high guard bit for carry-out.  */
  if ((high & 0x80000000) != 0)
    {
      lost |= low & 1;
      low = (low >> 1) | (high << 31);
      high = high >> 1;
      nrmcount -= 1;
    }

  if ((high & 0xffff8000) == 0)
    {
      high = (high << 16) + ((low >> 16) & 0xffff);
      low = low << 16;
      nrmcount += 16;
    }

  while ((high & 0xc0000000) == 0)
    {
      high = (high << 1) + ((low >> 31) & 1);
      low = low << 1;
      nrmcount += 1;
    }

  if (isfloat || GET_MODE_SIZE (mode) == UNITS_PER_WORD)
    {
      /* Keep 24 bits precision, bits 0x7fffff80.
         Rounding bit is 0x40.  */
      lost = lost | low | (high & 0x3f);
      low = 0;
      if (high & 0x40)
        {
          if ((high & 0x80) || lost)
            high += 0x40;
        }
      high &= 0xffffff80;
    }
  else
    {
      /* We need real.c to do long double formats, so here default
         to double precision.  */
#if HOST_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
      /* IEEE double.
         Keep 53 bits precision, bits 0x7fffffff fffffc00.
         Rounding bit is low word 0x200.  */
      lost = lost | (low & 0x1ff);
      if (low & 0x200)
        {
          if ((low & 0x400) || lost)
            {
              low = (low + 0x200) & 0xfffffc00;
              if (low == 0)
                high += 1;
            }
        }
      low &= 0xfffffc00;
#else
      /* Assume it's a VAX with 56-bit significand,
         bits 0x7fffffff ffffff80.  */
      lost = lost | (low & 0x7f);
      if (low & 0x40)
        {
          if ((low & 0x80) || lost)
            {
              low = (low + 0x40) & 0xffffff80;
              if (low == 0)
                high += 1;
            }
        }
      low &= 0xffffff80;
#endif
    }

  ip = (double) high;
  ip = REAL_VALUE_LDEXP (ip, 32) + (double) low;
  /* Apply shifts and exponent value as power of 2.  */
  ip = REAL_VALUE_LDEXP (ip, expon - (nrmcount + frexpon));

  if (sign < 0)
    ip = -ip;
  return ip;
}
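
/* Example (illustrative): parsing "0x1.8p1".  The significand digits
   accumulate as 0x18 with frexpon counting the 4 fraction bits after
   the point, and the exponent is read as decimal 1, so the result is
   0x18 * 2 ** (1 - 4) == 24 / 8 == 3.0.  */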

#endif /* no REAL_ARITHMETIC */
\f
/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (t)
     tree t;
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
    case REAL_CST:
      if (! TREE_UNSIGNED (type)
          && 0 != (tem = fold (build1 (NEGATE_EXPR, type, t)))
          && ! TREE_OVERFLOW (tem))
        return tem;
      break;

    case NEGATE_EXPR:
      return convert (type, TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (! FLOAT_TYPE_P (type) || flag_fast_math)
        return convert (type,
                        fold (build (MINUS_EXPR, TREE_TYPE (t),
                                     TREE_OPERAND (t, 1),
                                     TREE_OPERAND (t, 0))));
      break;

    default:
      break;
    }

  return convert (type, build1 (NEGATE_EXPR, TREE_TYPE (t), t));
}
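
/* Example (illustrative): for an integer type (or floats under
   -ffast-math), negate_expr applied to the tree for (a - b) yields the
   tree for (b - a) instead of wrapping a NEGATE_EXPR around the
   subtraction; applied to the constant 7 it folds directly to -7.  */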
\f
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  If NEGATE_P is true, we
   are negating all of IN.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (in, code, conp, litp, negate_p)
     tree in;
     enum tree_code code;
     tree *conp, *litp;
     int negate_p;
{
  tree var = 0;

  *conp = 0;
  *litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CONSTANT (in))
    *conp = in;

  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *litp = negate_expr (*litp);
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else
    var = in;

  if (negate_p)
    {
      var = negate_expr (var);
      *conp = negate_expr (*conp);
      *litp = negate_expr (*litp);
    }

  return var;
}
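
/* Example (illustrative): splitting IN == v - 5 with CODE ==
   PLUS_EXPR.  TREE_CODE (in) is MINUS_EXPR, which PLUS_EXPR may
   absorb; 5 is the literal operand, and because it was subtracted
   (neg_litp_p), *LITP becomes the constant -5, *CONP stays null, and
   the returned variable part is v.  */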

/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE, except if CODE is a
   MINUS_EXPR, in which case we use PLUS_EXPR since split_tree will already
   have taken care of the negations.  */

static tree
associate_trees (t1, t2, code, type)
     tree t1, t2;
     enum tree_code code;
     tree type;
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  if (code == MINUS_EXPR)
    code = PLUS_EXPR;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (TREE_CODE (t1) == NEGATE_EXPR)
        return build (MINUS_EXPR, type, convert (type, t2),
                      convert (type, TREE_OPERAND (t1, 0)));
      else if (TREE_CODE (t2) == NEGATE_EXPR)
        return build (MINUS_EXPR, type, convert (type, t1),
                      convert (type, TREE_OPERAND (t2, 0)));
      else
        return build (code, type, convert (type, t1), convert (type, t2));
    }

  return fold (build (code, type, convert (type, t1), convert (type, t2)));
}
\f
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.
   If FORSIZE is nonzero, compute overflow for unsigned types.  */

static tree
int_const_binop (code, arg1, arg2, notrunc, forsize)
     enum tree_code code;
     register tree arg1, arg2;
     int notrunc, forsize;
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  register tree t;
  int uns = TREE_UNSIGNED (TREE_TYPE (arg1));
  int overflow = 0;
  int no_overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case BIT_ANDTC_EXPR:
      low = int1l & ~int2l, hi = int1h & ~int2h;
      break;

    case RSHIFT_EXPR:
      int2l = - int2l;

      /* ... fall through ...  */

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (TREE_TYPE (arg1)),
                     &low, &hi, !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      int2l = - int2l;

      /* ... fall through ...  */

    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (TREE_TYPE (arg1)),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      abort ();
    }

  if (forsize && hi == 0 && low < 10000)
    return size_int_type_wide (low, TREE_TYPE (arg1));
  else
    {
      t = build_int_2 (low, hi);
      TREE_TYPE (t) = TREE_TYPE (arg1);
    }

  TREE_OVERFLOW (t)
    = ((notrunc ? (!uns || forsize) && overflow
        : force_fit_type (t, (!uns || forsize) && overflow) && ! no_overflow)
       | TREE_OVERFLOW (arg1)
       | TREE_OVERFLOW (arg2));

  /* If we're doing a size calculation, unsigned arithmetic does overflow.
     So check if force_fit_type truncated the value.  */
  if (forsize
      && ! TREE_OVERFLOW (t)
      && (TREE_INT_CST_HIGH (t) != hi
          || TREE_INT_CST_LOW (t) != low))
    TREE_OVERFLOW (t) = 1;

  TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
                                | TREE_CONSTANT_OVERFLOW (arg1)
                                | TREE_CONSTANT_OVERFLOW (arg2));
  return t;
}

/* Define input and output argument for const_binop_1.  */
struct cb_args
{
  enum tree_code code;          /* Input: tree code for operation.  */
  tree type;                    /* Input: tree type for operation.  */
  REAL_VALUE_TYPE d1, d2;       /* Input: floating point operands.  */
  tree t;                       /* Output: constant for result.  */
};

/* Do the real arithmetic for const_binop while protected by a
   float overflow handler.  */

static void
const_binop_1 (data)
     PTR data;
{
  struct cb_args *args = (struct cb_args *) data;
  REAL_VALUE_TYPE value;

#ifdef REAL_ARITHMETIC
  REAL_ARITHMETIC (value, args->code, args->d1, args->d2);
#else
  switch (args->code)
    {
    case PLUS_EXPR:
      value = args->d1 + args->d2;
      break;

    case MINUS_EXPR:
      value = args->d1 - args->d2;
      break;

    case MULT_EXPR:
      value = args->d1 * args->d2;
      break;

    case RDIV_EXPR:
#ifndef REAL_INFINITY
      if (args->d2 == 0)
        abort ();
#endif

      value = args->d1 / args->d2;
      break;

    case MIN_EXPR:
      value = MIN (args->d1, args->d2);
      break;

    case MAX_EXPR:
      value = MAX (args->d1, args->d2);
      break;

    default:
      abort ();
    }
#endif /* no REAL_ARITHMETIC */

  args->t
    = build_real (args->type,
                  real_value_truncate (TYPE_MODE (args->type), value));
}
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (code, arg1, arg2, notrunc)
     enum tree_code code;
     register tree arg1, arg2;
     int notrunc;
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc, 0);

#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
  if (TREE_CODE (arg1) == REAL_CST)
    {
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      int overflow = 0;
      tree t;
      struct cb_args args;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      /* Set up input for const_binop_1.  */
      args.type = TREE_TYPE (arg1);
      args.d1 = d1;
      args.d2 = d2;
      args.code = code;

      if (do_float_handler (const_binop_1, (PTR) &args))
        /* Receive output from const_binop_1.  */
        t = args.t;
      else
        {
          /* We got an exception from const_binop_1.  */
          t = copy_node (arg1);
          overflow = 1;
        }

      TREE_OVERFLOW (t)
        = (force_fit_type (t, overflow)
           | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      register tree type = TREE_TYPE (arg1);
      register tree r1 = TREE_REALPART (arg1);
      register tree i1 = TREE_IMAGPART (arg1);
      register tree r2 = TREE_REALPART (arg2);
      register tree i2 = TREE_IMAGPART (arg2);
      register tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            register tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t = build_complex (type,
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (PLUS_EXPR,
                                             const_binop (MULT_EXPR, r1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, i1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc),
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (MINUS_EXPR,
                                             const_binop (MULT_EXPR, i1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, r1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc));
          }
          break;

        default:
          abort ();
        }
      return t;
    }
  return 0;
}
\f
/* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
   bits are given by NUMBER, and of the sizetype represented by KIND.  */

tree
size_int_wide (number, kind)
     HOST_WIDE_INT number;
     enum size_type_kind kind;
{
  return size_int_type_wide (number, sizetype_tab[(int) kind]);
}

/* Likewise, but the desired type is specified explicitly.  */

tree
size_int_type_wide (number, type)
     HOST_WIDE_INT number;
     tree type;
{
  /* Type-size nodes already made for small sizes.  */
  static tree size_table[2048 + 1];
  static int init_p = 0;
  tree t;

  if (ggc_p && ! init_p)
    {
      ggc_add_tree_root ((tree *) size_table,
                         sizeof size_table / sizeof (tree));
      init_p = 1;
    }

  /* If this is a positive number that fits in the table we use to hold
     cached entries, see if it is already in the table and put it there
     if not.  */
  if (number >= 0 && number < (int) (sizeof size_table / sizeof size_table[0]))
    {
      if (size_table[number] != 0)
        for (t = size_table[number]; t != 0; t = TREE_CHAIN (t))
          if (TREE_TYPE (t) == type)
            return t;

      if (! ggc_p)
        {
          /* Make this a permanent node.  */
          push_obstacks_nochange ();
          end_temporary_allocation ();
        }

      t = build_int_2 (number, 0);
      TREE_TYPE (t) = type;
      TREE_CHAIN (t) = size_table[number];
      size_table[number] = t;

      if (! ggc_p)
        pop_obstacks ();

      return t;
    }

  t = build_int_2 (number, number < 0 ? -1 : 0);
  TREE_TYPE (t) = type;
  TREE_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (t) = force_fit_type (t, 0);
  return t;
}

/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type and it must be a sizetype.
   If the operands are constant, so is the result.  */

tree
size_binop (code, arg0, arg1)
     enum tree_code code;
     tree arg0, arg1;
{
  tree type = TREE_TYPE (arg0);

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0, 1);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold (build (code, type, arg0, arg1));
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (arg0, arg1)
     tree arg0, arg1;
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* If the type is already signed, just do the simple thing.  */
  if (! TREE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = (type == bitsizetype || type == ubitsizetype
           ? sbitsizetype : ssizetype);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, convert (ctype, arg0),
                       convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, convert (ctype, integer_zero_node),
                       convert (ctype, size_binop (MINUS_EXPR, arg1, arg0)));
}
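
/* Example (illustrative): with unsigned sizetype operands ARG0 == 4
   and ARG1 == 8, the constants are compared first, and the result is
   built as 0 - (ssizetype) (8 - 4) == -4 in the signed ssizetype,
   rather than letting 4 - 8 wrap around to a huge unsigned value in
   sizetype.  */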
\f
/* This structure is used to communicate arguments to fold_convert_1.  */
struct fc_args
{
  tree arg1;                    /* Input: value to convert.  */
  tree type;                    /* Input: type to convert value to.  */
  tree t;                       /* Output: result of conversion.  */
};

/* Function to convert floating-point constants, protected by floating
   point exception handler.  */

static void
fold_convert_1 (data)
     PTR data;
{
  struct fc_args *args = (struct fc_args *) data;

  args->t = build_real (args->type,
                        real_value_truncate (TYPE_MODE (args->type),
                                             TREE_REAL_CST (args->arg1)));
}
2005
2006 /* Given T, a tree representing type conversion of ARG1, a constant,
2007 return a constant tree representing the result of conversion. */
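/* For example, folding a conversion of the REAL_CST 3.5 to an integer
   type yields the INTEGER_CST 3: the value is truncated towards zero,
   and the checks below flag an overflow for NaNs and out-of-range
   values.  */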
2008
2009 static tree
2010 fold_convert (t, arg1)
2011 register tree t;
2012 register tree arg1;
2013 {
2014 register tree type = TREE_TYPE (t);
2015 int overflow = 0;
2016
2017 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2018 {
2019 if (TREE_CODE (arg1) == INTEGER_CST)
2020 {
2021 /* If we would build a constant wider than GCC supports,
2022 leave the conversion unfolded. */
2023 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
2024 return t;
2025
2026 /* If we are trying to make a sizetype for a small integer, use
2027 size_int to pick up cached types to reduce duplicate nodes. */
2028 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
2029 && compare_tree_int (arg1, 10000) < 0)
2030 return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);
2031
2032 /* Given an integer constant, make new constant with new type,
2033 appropriately sign-extended or truncated. */
2034 t = build_int_2 (TREE_INT_CST_LOW (arg1),
2035 TREE_INT_CST_HIGH (arg1));
2036 TREE_TYPE (t) = type;
2037 /* Indicate an overflow if (1) ARG1 already overflowed,
2038 or (2) force_fit_type indicates an overflow.
2039 Tell force_fit_type that an overflow has already occurred
2040 if ARG1 is a too-large unsigned value and T is signed.
2041 But don't indicate an overflow if converting a pointer. */
2042 TREE_OVERFLOW (t)
2043 = ((force_fit_type (t,
2044 (TREE_INT_CST_HIGH (arg1) < 0
2045 && (TREE_UNSIGNED (type)
2046 < TREE_UNSIGNED (TREE_TYPE (arg1)))))
2047 && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
2048 || TREE_OVERFLOW (arg1));
2049 TREE_CONSTANT_OVERFLOW (t)
2050 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2051 }
2052 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
2053 else if (TREE_CODE (arg1) == REAL_CST)
2054 {
2055 /* Don't initialize these, use assignments.
2056 Initialized local aggregates don't work on old compilers. */
2057 REAL_VALUE_TYPE x;
2058 REAL_VALUE_TYPE l;
2059 REAL_VALUE_TYPE u;
2060 tree type1 = TREE_TYPE (arg1);
2061 int no_upper_bound;
2062
2063 x = TREE_REAL_CST (arg1);
2064 l = real_value_from_int_cst (type1, TYPE_MIN_VALUE (type));
2065
2066 no_upper_bound = (TYPE_MAX_VALUE (type) == NULL);
2067 if (!no_upper_bound)
2068 u = real_value_from_int_cst (type1, TYPE_MAX_VALUE (type));
2069
2070 /* See if X will be in range after truncation towards 0.
2071 To compensate for truncation, move the bounds away from 0,
2072 but reject if X exactly equals the adjusted bounds. */
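/* For instance, for a signed 8-bit target type the bounds move from
   [-128, 127] to the exclusive pair (-129, 128): X = 127.9 truncates
   into range, while X = 128.0 or X = -129.0 is flagged as an
   overflow.  */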
2073 #ifdef REAL_ARITHMETIC
2074 REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
2075 if (!no_upper_bound)
2076 REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
2077 #else
2078 l--;
2079 if (!no_upper_bound)
2080 u++;
2081 #endif
2082 /* If X is a NaN, use zero instead and show we have an overflow.
2083 Otherwise, range check. */
2084 if (REAL_VALUE_ISNAN (x))
2085 overflow = 1, x = dconst0;
2086 else if (! (REAL_VALUES_LESS (l, x)
2087 && (no_upper_bound
2088 || REAL_VALUES_LESS (x, u))))
2089 overflow = 1;
2090
2091 #ifndef REAL_ARITHMETIC
2092 {
2093 HOST_WIDE_INT low, high;
2094 HOST_WIDE_INT half_word
2095 = (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2);
2096
2097 if (x < 0)
2098 x = -x;
2099
2100 high = (HOST_WIDE_INT) (x / half_word / half_word);
2101 x -= (REAL_VALUE_TYPE) high * half_word * half_word;
2102 if (x >= (REAL_VALUE_TYPE) half_word * half_word / 2)
2103 {
2104 low = x - (REAL_VALUE_TYPE) half_word * half_word / 2;
2105 low |= (HOST_WIDE_INT) -1 << (HOST_BITS_PER_WIDE_INT - 1);
2106 }
2107 else
2108 low = (HOST_WIDE_INT) x;
2109 if (TREE_REAL_CST (arg1) < 0)
2110 neg_double (low, high, &low, &high);
2111 t = build_int_2 (low, high);
2112 }
2113 #else
2114 {
2115 HOST_WIDE_INT low, high;
2116 REAL_VALUE_TO_INT (&low, &high, x);
2117 t = build_int_2 (low, high);
2118 }
2119 #endif
2120 TREE_TYPE (t) = type;
2121 TREE_OVERFLOW (t)
2122 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
2123 TREE_CONSTANT_OVERFLOW (t)
2124 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2125 }
2126 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
2127 TREE_TYPE (t) = type;
2128 }
2129 else if (TREE_CODE (type) == REAL_TYPE)
2130 {
2131 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
2132 if (TREE_CODE (arg1) == INTEGER_CST)
2133 return build_real_from_int_cst (type, arg1);
2134 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
2135 if (TREE_CODE (arg1) == REAL_CST)
2136 {
2137 struct fc_args args;
2138
2139 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
2140 {
2141 t = arg1;
2142 TREE_TYPE (arg1) = type;
2143 return t;
2144 }
2145
2146 /* Set up input for fold_convert_1. */
2147 args.arg1 = arg1;
2148 args.type = type;
2149
2150 if (do_float_handler (fold_convert_1, (PTR) &args))
2151 {
2152 /* Receive output from fold_convert_1. */
2153 t = args.t;
2154 }
2155 else
2156 {
2157 /* We got an exception from fold_convert_1. */
2158 overflow = 1;
2159 t = copy_node (arg1);
2160 }
2161
2162 TREE_OVERFLOW (t)
2163 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
2164 TREE_CONSTANT_OVERFLOW (t)
2165 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2166 return t;
2167 }
2168 }
2169 TREE_CONSTANT (t) = 1;
2170 return t;
2171 }
2172 \f
2173 /* Return an expr equal to X but certainly not valid as an lvalue. */
2174
2175 tree
2176 non_lvalue (x)
2177 tree x;
2178 {
2179 tree result;
2180
2181 /* These things are certainly not lvalues. */
2182 if (TREE_CODE (x) == NON_LVALUE_EXPR
2183 || TREE_CODE (x) == INTEGER_CST
2184 || TREE_CODE (x) == REAL_CST
2185 || TREE_CODE (x) == STRING_CST
2186 || TREE_CODE (x) == ADDR_EXPR)
2187 return x;
2188
2189 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2190 TREE_CONSTANT (result) = TREE_CONSTANT (x);
2191 return result;
2192 }
2193
2194 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2195 Zero means allow extended lvalues. */
2196
2197 int pedantic_lvalues;
2198
2199 /* When pedantic, return an expr equal to X but certainly not valid as a
2200 pedantic lvalue. Otherwise, return X. */
2201
2202 tree
2203 pedantic_non_lvalue (x)
2204 tree x;
2205 {
2206 if (pedantic_lvalues)
2207 return non_lvalue (x);
2208 else
2209 return x;
2210 }
2211 \f
2212 /* Given a tree comparison code, return the code that is the logical inverse
2213 of the given code. It is not safe to do this for floating-point
2214 comparisons, except for NE_EXPR and EQ_EXPR. */
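/* For example, LT_EXPR inverts to GE_EXPR. This is wrong for IEEE
   floating point when a NaN is involved: a < b and a >= b are then
   both false, so ! (a < b) is not equivalent to a >= b.  */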
2215
2216 static enum tree_code
2217 invert_tree_comparison (code)
2218 enum tree_code code;
2219 {
2220 switch (code)
2221 {
2222 case EQ_EXPR:
2223 return NE_EXPR;
2224 case NE_EXPR:
2225 return EQ_EXPR;
2226 case GT_EXPR:
2227 return LE_EXPR;
2228 case GE_EXPR:
2229 return LT_EXPR;
2230 case LT_EXPR:
2231 return GE_EXPR;
2232 case LE_EXPR:
2233 return GT_EXPR;
2234 default:
2235 abort ();
2236 }
2237 }
2238
2239 /* Similar, but return the comparison that results if the operands are
2240 swapped. This is safe for floating-point. */
2241
2242 static enum tree_code
2243 swap_tree_comparison (code)
2244 enum tree_code code;
2245 {
2246 switch (code)
2247 {
2248 case EQ_EXPR:
2249 case NE_EXPR:
2250 return code;
2251 case GT_EXPR:
2252 return LT_EXPR;
2253 case GE_EXPR:
2254 return LE_EXPR;
2255 case LT_EXPR:
2256 return GT_EXPR;
2257 case LE_EXPR:
2258 return GE_EXPR;
2259 default:
2260 abort ();
2261 }
2262 }
2263
2264 /* Return nonzero if CODE is a tree code that represents a truth value. */
2265
2266 static int
2267 truth_value_p (code)
2268 enum tree_code code;
2269 {
2270 return (TREE_CODE_CLASS (code) == '<'
2271 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2272 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2273 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2274 }
2275 \f
2276 /* Return nonzero if two operands are necessarily equal.
2277 If ONLY_CONST is non-zero, only return non-zero for constants.
2278 This function tests whether the operands are indistinguishable;
2279 it does not test whether they are equal using C's == operation.
2280 The distinction is important for IEEE floating point, because
2281 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2282 (2) two NaNs may be indistinguishable, but NaN!=NaN. */
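/* For example, for side-effect-free a and b,
   operand_equal_p (a + b, b + a, 0) returns 1, since PLUS_EXPR is
   handled by the commutative-operator case below.  */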
2283
2284 int
2285 operand_equal_p (arg0, arg1, only_const)
2286 tree arg0, arg1;
2287 int only_const;
2288 {
2289 /* If both types don't have the same signedness, then we can't consider
2290 them equal. We must check this before the STRIP_NOPS calls
2291 because they may change the signedness of the arguments. */
2292 if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
2293 return 0;
2294
2295 STRIP_NOPS (arg0);
2296 STRIP_NOPS (arg1);
2297
2298 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2299 /* This is needed for conversions and for COMPONENT_REF.
2300 Might as well play it safe and always test this. */
2301 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2302 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2303 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2304 return 0;
2305
2306 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2307 We don't care about side effects in that case because the SAVE_EXPR
2308 takes care of that for us. In all other cases, two expressions are
2309 equal if they have no side effects. If we have two identical
2310 expressions with side effects that should be treated the same due
2311 to the only side effects being identical SAVE_EXPR's, that will
2312 be detected in the recursive calls below. */
2313 if (arg0 == arg1 && ! only_const
2314 && (TREE_CODE (arg0) == SAVE_EXPR
2315 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2316 return 1;
2317
2318 /* Next handle constant cases, those for which we can return 1 even
2319 if ONLY_CONST is set. */
2320 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2321 switch (TREE_CODE (arg0))
2322 {
2323 case INTEGER_CST:
2324 return (! TREE_CONSTANT_OVERFLOW (arg0)
2325 && ! TREE_CONSTANT_OVERFLOW (arg1)
2326 && tree_int_cst_equal (arg0, arg1));
2327
2328 case REAL_CST:
2329 return (! TREE_CONSTANT_OVERFLOW (arg0)
2330 && ! TREE_CONSTANT_OVERFLOW (arg1)
2331 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2332 TREE_REAL_CST (arg1)));
2333
2334 case COMPLEX_CST:
2335 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2336 only_const)
2337 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2338 only_const));
2339
2340 case STRING_CST:
2341 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2342 && ! memcmp (TREE_STRING_POINTER (arg0),
2343 TREE_STRING_POINTER (arg1),
2344 TREE_STRING_LENGTH (arg0)));
2345
2346 case ADDR_EXPR:
2347 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2348 0);
2349 default:
2350 break;
2351 }
2352
2353 if (only_const)
2354 return 0;
2355
2356 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2357 {
2358 case '1':
2359 /* Two conversions are equal only if signedness and modes match. */
2360 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
2361 && (TREE_UNSIGNED (TREE_TYPE (arg0))
2362 != TREE_UNSIGNED (TREE_TYPE (arg1))))
2363 return 0;
2364
2365 return operand_equal_p (TREE_OPERAND (arg0, 0),
2366 TREE_OPERAND (arg1, 0), 0);
2367
2368 case '<':
2369 case '2':
2370 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
2371 && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
2372 0))
2373 return 1;
2374
2375 /* For commutative ops, allow the other order. */
2376 return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
2377 || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
2378 || TREE_CODE (arg0) == BIT_IOR_EXPR
2379 || TREE_CODE (arg0) == BIT_XOR_EXPR
2380 || TREE_CODE (arg0) == BIT_AND_EXPR
2381 || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
2382 && operand_equal_p (TREE_OPERAND (arg0, 0),
2383 TREE_OPERAND (arg1, 1), 0)
2384 && operand_equal_p (TREE_OPERAND (arg0, 1),
2385 TREE_OPERAND (arg1, 0), 0));
2386
2387 case 'r':
2388 /* If either of the pointer (or reference) expressions we are
2389 dereferencing contains a side effect, these cannot be equal. */
2390 if (TREE_SIDE_EFFECTS (arg0)
2391 || TREE_SIDE_EFFECTS (arg1))
2392 return 0;
2393
2394 switch (TREE_CODE (arg0))
2395 {
2396 case INDIRECT_REF:
2397 return operand_equal_p (TREE_OPERAND (arg0, 0),
2398 TREE_OPERAND (arg1, 0), 0);
2399
2400 case COMPONENT_REF:
2401 case ARRAY_REF:
2402 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2403 TREE_OPERAND (arg1, 0), 0)
2404 && operand_equal_p (TREE_OPERAND (arg0, 1),
2405 TREE_OPERAND (arg1, 1), 0));
2406
2407 case BIT_FIELD_REF:
2408 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2409 TREE_OPERAND (arg1, 0), 0)
2410 && operand_equal_p (TREE_OPERAND (arg0, 1),
2411 TREE_OPERAND (arg1, 1), 0)
2412 && operand_equal_p (TREE_OPERAND (arg0, 2),
2413 TREE_OPERAND (arg1, 2), 0));
2414 default:
2415 return 0;
2416 }
2417
2418 case 'e':
2419 if (TREE_CODE (arg0) == RTL_EXPR)
2420 return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
2421 return 0;
2422
2423 default:
2424 return 0;
2425 }
2426 }
2427 \f
2428 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2429 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2430
2431 When in doubt, return 0. */
2432
2433 static int
2434 operand_equal_for_comparison_p (arg0, arg1, other)
2435 tree arg0, arg1;
2436 tree other;
2437 {
2438 int unsignedp1, unsignedpo;
2439 tree primarg0, primarg1, primother;
2440 unsigned int correct_width;
2441
2442 if (operand_equal_p (arg0, arg1, 0))
2443 return 1;
2444
2445 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2446 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2447 return 0;
2448
2449 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2450 and see if the inner values are the same. This removes any
2451 signedness comparison, which doesn't matter here. */
2452 primarg0 = arg0, primarg1 = arg1;
2453 STRIP_NOPS (primarg0); STRIP_NOPS (primarg1);
2454 if (operand_equal_p (primarg0, primarg1, 0))
2455 return 1;
2456
2457 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2458 actual comparison operand, ARG0.
2459
2460 First throw away any conversions to wider types
2461 already present in the operands. */
2462
2463 primarg1 = get_narrower (arg1, &unsignedp1);
2464 primother = get_narrower (other, &unsignedpo);
2465
2466 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2467 if (unsignedp1 == unsignedpo
2468 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2469 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2470 {
2471 tree type = TREE_TYPE (arg0);
2472
2473 /* Make sure shorter operand is extended the right way
2474 to match the longer operand. */
2475 primarg1 = convert (signed_or_unsigned_type (unsignedp1,
2476 TREE_TYPE (primarg1)),
2477 primarg1);
2478
2479 if (operand_equal_p (arg0, convert (type, primarg1), 0))
2480 return 1;
2481 }
2482
2483 return 0;
2484 }
2485 \f
2486 /* See if ARG is an expression that is either a comparison or is performing
2487 arithmetic on comparisons. The comparisons must only be comparing
2488 two different values, which will be stored in *CVAL1 and *CVAL2; if
2489 they are non-zero it means that some operands have already been found.
2490 No variables may be used anywhere else in the expression except in the
2491 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2492 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2493
2494 If this is true, return 1. Otherwise, return zero. */
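/* For example, for ARG = (a < b && b < a) the walk below records
   *CVAL1 = a and *CVAL2 = b and returns 1; a third variable anywhere
   in ARG would make the function return 0.  */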
2495
2496 static int
2497 twoval_comparison_p (arg, cval1, cval2, save_p)
2498 tree arg;
2499 tree *cval1, *cval2;
2500 int *save_p;
2501 {
2502 enum tree_code code = TREE_CODE (arg);
2503 char class = TREE_CODE_CLASS (code);
2504
2505 /* We can handle some of the 'e' cases here. */
2506 if (class == 'e' && code == TRUTH_NOT_EXPR)
2507 class = '1';
2508 else if (class == 'e'
2509 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2510 || code == COMPOUND_EXPR))
2511 class = '2';
2512
2513 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
2514 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2515 {
2516 /* If we've already found a CVAL1 or CVAL2, this expression is
2517 too complex to handle. */
2518 if (*cval1 || *cval2)
2519 return 0;
2520
2521 class = '1';
2522 *save_p = 1;
2523 }
2524
2525 switch (class)
2526 {
2527 case '1':
2528 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2529
2530 case '2':
2531 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2532 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2533 cval1, cval2, save_p));
2534
2535 case 'c':
2536 return 1;
2537
2538 case 'e':
2539 if (code == COND_EXPR)
2540 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2541 cval1, cval2, save_p)
2542 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2543 cval1, cval2, save_p)
2544 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2545 cval1, cval2, save_p));
2546 return 0;
2547
2548 case '<':
2549 /* First see if we can handle the first operand, then the second. For
2550 the second operand, we know *CVAL1 can't be zero. It must be that
2551 one side of the comparison is each of the values; test for the
2552 case where this isn't true by failing if the two operands
2553 are the same. */
2554
2555 if (operand_equal_p (TREE_OPERAND (arg, 0),
2556 TREE_OPERAND (arg, 1), 0))
2557 return 0;
2558
2559 if (*cval1 == 0)
2560 *cval1 = TREE_OPERAND (arg, 0);
2561 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2562 ;
2563 else if (*cval2 == 0)
2564 *cval2 = TREE_OPERAND (arg, 0);
2565 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2566 ;
2567 else
2568 return 0;
2569
2570 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2571 ;
2572 else if (*cval2 == 0)
2573 *cval2 = TREE_OPERAND (arg, 1);
2574 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2575 ;
2576 else
2577 return 0;
2578
2579 return 1;
2580
2581 default:
2582 return 0;
2583 }
2584 }
2585 \f
2586 /* ARG is a tree that is known to contain just arithmetic operations and
2587 comparisons. Evaluate the operations in the tree substituting NEW0 for
2588 any occurrence of OLD0 as an operand of a comparison and likewise for
2589 NEW1 and OLD1. */
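/* For example, substituting into (a < b && a < c) with OLD0 = a,
   NEW0 = 0, OLD1 = b, NEW1 = 1 builds fold (0 < 1 && 0 < c), which
   fold in turn reduces to 0 < c.  */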
2590
2591 static tree
2592 eval_subst (arg, old0, new0, old1, new1)
2593 tree arg;
2594 tree old0, new0, old1, new1;
2595 {
2596 tree type = TREE_TYPE (arg);
2597 enum tree_code code = TREE_CODE (arg);
2598 char class = TREE_CODE_CLASS (code);
2599
2600 /* We can handle some of the 'e' cases here. */
2601 if (class == 'e' && code == TRUTH_NOT_EXPR)
2602 class = '1';
2603 else if (class == 'e'
2604 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2605 class = '2';
2606
2607 switch (class)
2608 {
2609 case '1':
2610 return fold (build1 (code, type,
2611 eval_subst (TREE_OPERAND (arg, 0),
2612 old0, new0, old1, new1)));
2613
2614 case '2':
2615 return fold (build (code, type,
2616 eval_subst (TREE_OPERAND (arg, 0),
2617 old0, new0, old1, new1),
2618 eval_subst (TREE_OPERAND (arg, 1),
2619 old0, new0, old1, new1)));
2620
2621 case 'e':
2622 switch (code)
2623 {
2624 case SAVE_EXPR:
2625 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2626
2627 case COMPOUND_EXPR:
2628 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2629
2630 case COND_EXPR:
2631 return fold (build (code, type,
2632 eval_subst (TREE_OPERAND (arg, 0),
2633 old0, new0, old1, new1),
2634 eval_subst (TREE_OPERAND (arg, 1),
2635 old0, new0, old1, new1),
2636 eval_subst (TREE_OPERAND (arg, 2),
2637 old0, new0, old1, new1)));
2638 default:
2639 break;
2640 }
2641 /* fall through - ??? */
2642
2643 case '<':
2644 {
2645 tree arg0 = TREE_OPERAND (arg, 0);
2646 tree arg1 = TREE_OPERAND (arg, 1);
2647
2648 /* We need to check both for exact equality and tree equality. The
2649 former will be true if the operand has a side-effect. In that
2650 case, we know the operand occurred exactly once. */
2651
2652 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2653 arg0 = new0;
2654 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2655 arg0 = new1;
2656
2657 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2658 arg1 = new0;
2659 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2660 arg1 = new1;
2661
2662 return fold (build (code, type, arg0, arg1));
2663 }
2664
2665 default:
2666 return arg;
2667 }
2668 }
2669 \f
2670 /* Return a tree for the case when the result of an expression is RESULT
2671 converted to TYPE and OMITTED was previously an operand of the expression
2672 but is now not needed (e.g., we folded OMITTED * 0).
2673
2674 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2675 the conversion of RESULT to TYPE. */
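/* For example, when folding i++ * 0 to 0, the increment must still be
   evaluated, so this returns the COMPOUND_EXPR (i++, 0); had the
   omitted operand been side-effect free, just the constant 0 would be
   returned.  */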
2676
2677 static tree
2678 omit_one_operand (type, result, omitted)
2679 tree type, result, omitted;
2680 {
2681 tree t = convert (type, result);
2682
2683 if (TREE_SIDE_EFFECTS (omitted))
2684 return build (COMPOUND_EXPR, type, omitted, t);
2685
2686 return non_lvalue (t);
2687 }
2688
2689 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2690
2691 static tree
2692 pedantic_omit_one_operand (type, result, omitted)
2693 tree type, result, omitted;
2694 {
2695 tree t = convert (type, result);
2696
2697 if (TREE_SIDE_EFFECTS (omitted))
2698 return build (COMPOUND_EXPR, type, omitted, t);
2699
2700 return pedantic_non_lvalue (t);
2701 }
2702
2704 \f
2705 /* Return a simplified tree node for the truth-negation of ARG. This
2706 never alters ARG itself. We assume that ARG is an operation that
2707 returns a truth value (0 or 1). */
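/* For example, a && b inverts to !a || !b by De Morgan's law (the
   TRUTH_ANDIF_EXPR case below), and an integer comparison a < b
   inverts to a >= b via invert_tree_comparison.  */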
2708
2709 tree
2710 invert_truthvalue (arg)
2711 tree arg;
2712 {
2713 tree type = TREE_TYPE (arg);
2714 enum tree_code code = TREE_CODE (arg);
2715
2716 if (code == ERROR_MARK)
2717 return arg;
2718
2719 /* If this is a comparison, we can simply invert it, except for
2720 floating-point non-equality comparisons, in which case we just
2721 enclose a TRUTH_NOT_EXPR around what we have. */
2722
2723 if (TREE_CODE_CLASS (code) == '<')
2724 {
2725 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2726 && !flag_fast_math && code != NE_EXPR && code != EQ_EXPR)
2727 return build1 (TRUTH_NOT_EXPR, type, arg);
2728 else
2729 return build (invert_tree_comparison (code), type,
2730 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2731 }
2732
2733 switch (code)
2734 {
2735 case INTEGER_CST:
2736 return convert (type, build_int_2 (integer_zerop (arg), 0));
2737
2738 case TRUTH_AND_EXPR:
2739 return build (TRUTH_OR_EXPR, type,
2740 invert_truthvalue (TREE_OPERAND (arg, 0)),
2741 invert_truthvalue (TREE_OPERAND (arg, 1)));
2742
2743 case TRUTH_OR_EXPR:
2744 return build (TRUTH_AND_EXPR, type,
2745 invert_truthvalue (TREE_OPERAND (arg, 0)),
2746 invert_truthvalue (TREE_OPERAND (arg, 1)));
2747
2748 case TRUTH_XOR_EXPR:
2749 /* Here we can invert either operand. We invert the first operand
2750 unless the second operand is a TRUTH_NOT_EXPR in which case our
2751 result is the XOR of the first operand with the inside of the
2752 negation of the second operand. */
2753
2754 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2755 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2756 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2757 else
2758 return build (TRUTH_XOR_EXPR, type,
2759 invert_truthvalue (TREE_OPERAND (arg, 0)),
2760 TREE_OPERAND (arg, 1));
2761
2762 case TRUTH_ANDIF_EXPR:
2763 return build (TRUTH_ORIF_EXPR, type,
2764 invert_truthvalue (TREE_OPERAND (arg, 0)),
2765 invert_truthvalue (TREE_OPERAND (arg, 1)));
2766
2767 case TRUTH_ORIF_EXPR:
2768 return build (TRUTH_ANDIF_EXPR, type,
2769 invert_truthvalue (TREE_OPERAND (arg, 0)),
2770 invert_truthvalue (TREE_OPERAND (arg, 1)));
2771
2772 case TRUTH_NOT_EXPR:
2773 return TREE_OPERAND (arg, 0);
2774
2775 case COND_EXPR:
2776 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2777 invert_truthvalue (TREE_OPERAND (arg, 1)),
2778 invert_truthvalue (TREE_OPERAND (arg, 2)));
2779
2780 case COMPOUND_EXPR:
2781 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2782 invert_truthvalue (TREE_OPERAND (arg, 1)));
2783
2784 case WITH_RECORD_EXPR:
2785 return build (WITH_RECORD_EXPR, type,
2786 invert_truthvalue (TREE_OPERAND (arg, 0)),
2787 TREE_OPERAND (arg, 1));
2788
2789 case NON_LVALUE_EXPR:
2790 return invert_truthvalue (TREE_OPERAND (arg, 0));
2791
2792 case NOP_EXPR:
2793 case CONVERT_EXPR:
2794 case FLOAT_EXPR:
2795 return build1 (TREE_CODE (arg), type,
2796 invert_truthvalue (TREE_OPERAND (arg, 0)));
2797
2798 case BIT_AND_EXPR:
2799 if (!integer_onep (TREE_OPERAND (arg, 1)))
2800 break;
2801 return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));
2802
2803 case SAVE_EXPR:
2804 return build1 (TRUTH_NOT_EXPR, type, arg);
2805
2806 case CLEANUP_POINT_EXPR:
2807 return build1 (CLEANUP_POINT_EXPR, type,
2808 invert_truthvalue (TREE_OPERAND (arg, 0)));
2809
2810 default:
2811 break;
2812 }
2813 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2814 abort ();
2815 return build1 (TRUTH_NOT_EXPR, type, arg);
2816 }
2817
2818 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2819 operands are another bit-wise operation with a common input. If so,
2820 distribute the bit operations to save an operation and possibly two if
2821 constants are involved. For example, convert
2822 (A | B) & (A | C) into A | (B & C)
2823 Further simplification will occur if B and C are constants.
2824
2825 If this optimization cannot be done, 0 will be returned. */
2826
2827 static tree
2828 distribute_bit_expr (code, type, arg0, arg1)
2829 enum tree_code code;
2830 tree type;
2831 tree arg0, arg1;
2832 {
2833 tree common;
2834 tree left, right;
2835
2836 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2837 || TREE_CODE (arg0) == code
2838 || (TREE_CODE (arg0) != BIT_AND_EXPR
2839 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2840 return 0;
2841
2842 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2843 {
2844 common = TREE_OPERAND (arg0, 0);
2845 left = TREE_OPERAND (arg0, 1);
2846 right = TREE_OPERAND (arg1, 1);
2847 }
2848 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2849 {
2850 common = TREE_OPERAND (arg0, 0);
2851 left = TREE_OPERAND (arg0, 1);
2852 right = TREE_OPERAND (arg1, 0);
2853 }
2854 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2855 {
2856 common = TREE_OPERAND (arg0, 1);
2857 left = TREE_OPERAND (arg0, 0);
2858 right = TREE_OPERAND (arg1, 1);
2859 }
2860 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2861 {
2862 common = TREE_OPERAND (arg0, 1);
2863 left = TREE_OPERAND (arg0, 0);
2864 right = TREE_OPERAND (arg1, 0);
2865 }
2866 else
2867 return 0;
2868
2869 return fold (build (TREE_CODE (arg0), type, common,
2870 fold (build (code, type, left, right))));
2871 }
2872 \f
2873 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2874 starting at BITPOS. The field is unsigned if UNSIGNEDP is non-zero. */
2875
2876 static tree
2877 make_bit_field_ref (inner, type, bitsize, bitpos, unsignedp)
2878 tree inner;
2879 tree type;
2880 int bitsize, bitpos;
2881 int unsignedp;
2882 {
2883 tree result = build (BIT_FIELD_REF, type, inner,
2884 size_int (bitsize), bitsize_int (bitpos));
2885
2886 TREE_UNSIGNED (result) = unsignedp;
2887
2888 return result;
2889 }
2890
2891 /* Optimize a bit-field compare.
2892
2893 There are two cases: First is a compare against a constant and the
2894 second is a comparison of two items where the fields are at the same
2895 bit position relative to the start of a chunk (byte, halfword, word)
2896 large enough to contain it. In these cases we can avoid the shift
2897 implicit in bitfield extractions.
2898
2899 For constants, we emit a compare of the shifted constant with the
2900 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2901 compared. For two fields at the same position, we do the ANDs with the
2902 similar mask and compare the result of the ANDs.
2903
2904 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2905 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2906 are the left and right operands of the comparison, respectively.
2907
2908 If the optimization described above can be done, we return the resulting
2909 tree. Otherwise we return zero. */
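/* For example, given struct s { unsigned f : 3; } x, the test
   x.f == 5 becomes, roughly, (WORD & MASK) == ((5 << POS) & MASK),
   where WORD stands for a conveniently sized load covering the field,
   MASK for the mask selecting the field's bits and POS for its bit
   position; the shift implicit in extracting x.f itself is avoided.  */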
2910
2911 static tree
2912 optimize_bit_field_compare (code, compare_type, lhs, rhs)
2913 enum tree_code code;
2914 tree compare_type;
2915 tree lhs, rhs;
2916 {
2917 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
2918 tree type = TREE_TYPE (lhs);
2919 tree signed_type, unsigned_type;
2920 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2921 enum machine_mode lmode, rmode, nmode;
2922 int lunsignedp, runsignedp;
2923 int lvolatilep = 0, rvolatilep = 0;
2924 unsigned int alignment;
2925 tree linner, rinner = NULL_TREE;
2926 tree mask;
2927 tree offset;
2928
2929 /* Get all the information about the extractions being done. If the bit size
2930 is the same as the size of the underlying object, we aren't doing an
2931 extraction at all and so can do nothing. We also don't want to
2932 do anything if the inner expression is a PLACEHOLDER_EXPR since we
2933 then will no longer be able to replace it. */
2934 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2935 &lunsignedp, &lvolatilep, &alignment);
2936 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2937 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
2938 return 0;
2939
2940 if (!const_p)
2941 {
2942 /* If this is not a constant, we can only do something if bit positions,
2943 sizes, and signedness are the same. */
2944 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
2945 &runsignedp, &rvolatilep, &alignment);
2946
2947 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2948 || lunsignedp != runsignedp || offset != 0
2949 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
2950 return 0;
2951 }
2952
2953 /* See if we can find a mode to refer to this field. We should be able to,
2954 but fail if we can't. */
2955 nmode = get_best_mode (lbitsize, lbitpos,
2956 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
2957 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
2958 TYPE_ALIGN (TREE_TYPE (rinner))),
2959 word_mode, lvolatilep || rvolatilep);
2960 if (nmode == VOIDmode)
2961 return 0;
2962
2963 /* Set signed and unsigned types of the precision of this mode for the
2964 shifts below. */
2965 signed_type = type_for_mode (nmode, 0);
2966 unsigned_type = type_for_mode (nmode, 1);
2967
2968 /* Compute the bit position and size for the new reference and our offset
2969 within it. If the new reference is the same size as the original, we
2970 won't optimize anything, so return zero. */
2971 nbitsize = GET_MODE_BITSIZE (nmode);
2972 nbitpos = lbitpos & ~ (nbitsize - 1);
2973 lbitpos -= nbitpos;
2974 if (nbitsize == lbitsize)
2975 return 0;
2976
2977 if (BYTES_BIG_ENDIAN)
2978 lbitpos = nbitsize - lbitsize - lbitpos;
2979
2980 /* Make the mask to be used against the extracted field. */
2981 mask = build_int_2 (~0, ~0);
2982 TREE_TYPE (mask) = unsigned_type;
2983 force_fit_type (mask, 0);
2984 mask = convert (unsigned_type, mask);
2985 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
2986 mask = const_binop (RSHIFT_EXPR, mask,
2987 size_int (nbitsize - lbitsize - lbitpos), 0);
2988
2989 if (! const_p)
2990 /* If not comparing with constant, just rework the comparison
2991 and return. */
2992 return build (code, compare_type,
2993 build (BIT_AND_EXPR, unsigned_type,
2994 make_bit_field_ref (linner, unsigned_type,
2995 nbitsize, nbitpos, 1),
2996 mask),
2997 build (BIT_AND_EXPR, unsigned_type,
2998 make_bit_field_ref (rinner, unsigned_type,
2999 nbitsize, nbitpos, 1),
3000 mask));
3001
3002 /* Otherwise, we are handling the constant case. See if the constant is too
3003 big for the field. Warn and return a tree for 0 (false) if so. We do
3004 this not only for its own sake, but to avoid having to test for this
3005 error case below. If we didn't, we might generate wrong code.
3006
3007 For unsigned fields, the constant shifted right by the field length should
3008 be all zero. For signed fields, the high-order bits should agree with
3009 the sign bit. */
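/* For example, a 3-bit unsigned field compared against 9 trips the
   unsigned check below, since 9 >> 3 is nonzero: f == 9 folds to 0
   and f != 9 folds to 1, with a warning either way.  */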
3010
3011 if (lunsignedp)
3012 {
3013 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3014 convert (unsigned_type, rhs),
3015 size_int (lbitsize), 0)))
3016 {
3017 warning ("comparison is always %d due to width of bitfield",
3018 code == NE_EXPR);
3019 return convert (compare_type,
3020 (code == NE_EXPR
3021 ? integer_one_node : integer_zero_node));
3022 }
3023 }
3024 else
3025 {
3026 tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
3027 size_int (lbitsize - 1), 0);
3028 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3029 {
3030 warning ("comparison is always %d due to width of bitfield",
3031 code == NE_EXPR);
3032 return convert (compare_type,
3033 (code == NE_EXPR
3034 ? integer_one_node : integer_zero_node));
3035 }
3036 }
3037
3038 /* Single-bit compares should always be against zero. */
3039 if (lbitsize == 1 && ! integer_zerop (rhs))
3040 {
3041 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3042 rhs = convert (type, integer_zero_node);
3043 }
3044
3045 /* Make a new bitfield reference, shift the constant over the
3046 appropriate number of bits and mask it with the computed mask
3047 (in case this was a signed field). If we changed it, make a new one. */
3048 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3049 if (lvolatilep)
3050 {
3051 TREE_SIDE_EFFECTS (lhs) = 1;
3052 TREE_THIS_VOLATILE (lhs) = 1;
3053 }
3054
3055 rhs = fold (const_binop (BIT_AND_EXPR,
3056 const_binop (LSHIFT_EXPR,
3057 convert (unsigned_type, rhs),
3058 size_int (lbitpos), 0),
3059 mask, 0));
3060
3061 return build (code, compare_type,
3062 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
3063 rhs);
3064 }
3065 \f
3066 /* Subroutine for fold_truthop: decode a field reference.
3067
3068 If EXP is a comparison reference, we return the innermost reference.
3069
3070 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3071 set to the starting bit number.
3072
3073 If the innermost field can be completely contained in a mode-sized
3074 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3075
3076 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3077 otherwise it is not changed.
3078
3079 *PUNSIGNEDP is set to the signedness of the field.
3080
3081 *PMASK is set to the mask used. This is either contained in a
3082 BIT_AND_EXPR or derived from the width of the field.
3083
3084 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3085
3086 Return 0 if this is not a component reference or is one that we can't
3087 do anything with. */
3088
3089 static tree
3090 decode_field_reference (exp, pbitsize, pbitpos, pmode, punsignedp,
3091 pvolatilep, pmask, pand_mask)
3092 tree exp;
3093 HOST_WIDE_INT *pbitsize, *pbitpos;
3094 enum machine_mode *pmode;
3095 int *punsignedp, *pvolatilep;
3096 tree *pmask;
3097 tree *pand_mask;
3098 {
3099 tree and_mask = 0;
3100 tree mask, inner, offset;
3101 tree unsigned_type;
3102 unsigned int precision;
3103 unsigned int alignment;
3104
3105 /* All the optimizations using this function assume integer fields.
3106 There are problems with FP fields since the type_for_size call
3107 below can fail for, e.g., XFmode. */
3108 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3109 return 0;
3110
3111 STRIP_NOPS (exp);
3112
3113 if (TREE_CODE (exp) == BIT_AND_EXPR)
3114 {
3115 and_mask = TREE_OPERAND (exp, 1);
3116 exp = TREE_OPERAND (exp, 0);
3117 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3118 if (TREE_CODE (and_mask) != INTEGER_CST)
3119 return 0;
3120 }
3121
3123 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3124 punsignedp, pvolatilep, &alignment);
3125 if ((inner == exp && and_mask == 0)
3126 || *pbitsize < 0 || offset != 0
3127 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3128 return 0;
3129
3130 /* Compute the mask to access the bitfield. */
3131 unsigned_type = type_for_size (*pbitsize, 1);
3132 precision = TYPE_PRECISION (unsigned_type);
3133
3134 mask = build_int_2 (~0, ~0);
3135 TREE_TYPE (mask) = unsigned_type;
3136 force_fit_type (mask, 0);
3137 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3138 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3139
3140 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3141 if (and_mask != 0)
3142 mask = fold (build (BIT_AND_EXPR, unsigned_type,
3143 convert (unsigned_type, and_mask), mask));
3144
3145 *pmask = mask;
3146 *pand_mask = and_mask;
3147 return inner;
3148 }
3149
3150 /* Return non-zero if MASK represents a mask of SIZE ones in the low-order
3151 bit positions. */
3152
3153 static int
3154 all_ones_mask_p (mask, size)
3155 tree mask;
3156 int size;
3157 {
3158 tree type = TREE_TYPE (mask);
3159 unsigned int precision = TYPE_PRECISION (type);
3160 tree tmask;
3161
3162 tmask = build_int_2 (~0, ~0);
3163 TREE_TYPE (tmask) = signed_type (type);
3164 force_fit_type (tmask, 0);
3165 return
3166 tree_int_cst_equal (mask,
3167 const_binop (RSHIFT_EXPR,
3168 const_binop (LSHIFT_EXPR, tmask,
3169 size_int (precision - size),
3170 0),
3171 size_int (precision - size), 0));
3172 }
3173
3174 /* Subroutine for fold_truthop: determine if an operand is simple enough
3175 to be evaluated unconditionally. */
3176
3177 static int
3178 simple_operand_p (exp)
3179 tree exp;
3180 {
3181 /* Strip any conversions that don't change the machine mode. */
3182 while ((TREE_CODE (exp) == NOP_EXPR
3183 || TREE_CODE (exp) == CONVERT_EXPR)
3184 && (TYPE_MODE (TREE_TYPE (exp))
3185 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
3186 exp = TREE_OPERAND (exp, 0);
3187
3188 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
3189 || (DECL_P (exp)
3190 && ! TREE_ADDRESSABLE (exp)
3191 && ! TREE_THIS_VOLATILE (exp)
3192 && ! DECL_NONLOCAL (exp)
3193 /* Don't regard global variables as simple. They may be
3194 allocated in ways unknown to the compiler (shared memory,
3195 #pragma weak, etc). */
3196 && ! TREE_PUBLIC (exp)
3197 && ! DECL_EXTERNAL (exp)
3198 /* Loading a static variable is unduly expensive, but global
3199 registers aren't expensive. */
3200 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3201 }
3202 \f
3203 /* The following functions are subroutines to fold_range_test and allow it to
3204 try to change a logical combination of comparisons into a range test.
3205
3206 For example, both
3207 X == 2 || X == 3 || X == 4 || X == 5
3208 and
3209 X >= 2 && X <= 5
3210 are converted to
3211 (unsigned) (X - 2) <= 3
3212
3213 We describe each set of comparisons as being either inside or outside
3214 a range, using a variable named like IN_P, and then describe the
3215 range with a lower and upper bound. If one of the bounds is omitted,
3216 it represents either the highest or lowest value of the type.
3217
3218 In the comments below, we represent a range by two numbers in brackets
3219 preceded by a "+" to designate being inside that range, or a "-" to
3220 designate being outside that range, so the condition can be inverted by
3221 flipping the prefix. An omitted bound is represented by a "-". For
3222 example, "- [-, 10]" means being outside the range starting at the lowest
3223 possible value and ending at 10, in other words, being greater than 10.
3224 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3225 always false.
3226
3227 We set up things so that the missing bounds are handled in a consistent
3228 manner so neither a missing bound nor "true" and "false" need to be
3229 handled using a special case. */
3230
3231 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3232 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3233 and UPPER1_P are nonzero if the respective argument is an upper bound
3234 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3235 must be specified for a comparison. ARG1 will be converted to ARG0's
3236 type if both are specified. */
3237
3238 static tree
3239 range_binop (code, type, arg0, upper0_p, arg1, upper1_p)
3240 enum tree_code code;
3241 tree type;
3242 tree arg0, arg1;
3243 int upper0_p, upper1_p;
3244 {
3245 tree tem;
3246 int result;
3247 int sgn0, sgn1;
3248
3249 /* If neither arg represents infinity, do the normal operation.
3250 Else, if not a comparison, return infinity. Else handle the special
3251 comparison rules. Note that most of the cases below won't occur, but
3252 are handled for consistency. */
3253
3254 if (arg0 != 0 && arg1 != 0)
3255 {
3256 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
3257 arg0, convert (TREE_TYPE (arg0), arg1)));
3258 STRIP_NOPS (tem);
3259 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3260 }
3261
3262 if (TREE_CODE_CLASS (code) != '<')
3263 return 0;
3264
3265 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3266 for neither. In real maths, we cannot assume open ended ranges are
3267 the same. But, this is computer arithmetic, where numbers are finite.
3268 We can therefore make the transformation of any unbounded range with
3269 the value Z, Z being greater than any representable number. This permits
3270 us to treat unbounded ranges as equal. */
3271 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3272 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3273 switch (code)
3274 {
3275 case EQ_EXPR:
3276 result = sgn0 == sgn1;
3277 break;
3278 case NE_EXPR:
3279 result = sgn0 != sgn1;
3280 break;
3281 case LT_EXPR:
3282 result = sgn0 < sgn1;
3283 break;
3284 case LE_EXPR:
3285 result = sgn0 <= sgn1;
3286 break;
3287 case GT_EXPR:
3288 result = sgn0 > sgn1;
3289 break;
3290 case GE_EXPR:
3291 result = sgn0 >= sgn1;
3292 break;
3293 default:
3294 abort ();
3295 }
3296
3297 return convert (type, result ? integer_one_node : integer_zero_node);
3298 }
3299 \f
3300 /* Given EXP, a logical expression, set the range it is testing into
3301 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3302 actually being tested. *PLOW and *PHIGH will be made the same type
3303 as the returned expression. If EXP is not a comparison, we will most
3304 likely not be returning a useful value and range. */
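/* For example, for EXP = x > 5 with signed x, this returns x with
   *PIN_P == 0, *PLOW == 0 and *PHIGH == 5, i.e. x lies outside the
   range [lowest value, 5], which is exactly x > 5.  */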
3305
3306 static tree
3307 make_range (exp, pin_p, plow, phigh)
3308 tree exp;
3309 int *pin_p;
3310 tree *plow, *phigh;
3311 {
3312 enum tree_code code;
3313 tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
3314 tree orig_type = NULL_TREE;
3315 int in_p, n_in_p;
3316 tree low, high, n_low, n_high;
3317
3318 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3319 and see if we can refine the range. Some of the cases below may not
3320 happen, but it doesn't seem worth worrying about this. We "continue"
3321 the outer loop when we've changed something; otherwise we "break"
3322 the switch, which will "break" the while. */
3323
3324 in_p = 0, low = high = convert (TREE_TYPE (exp), integer_zero_node);
3325
3326 while (1)
3327 {
3328 code = TREE_CODE (exp);
3329
3330 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3331 {
3332 arg0 = TREE_OPERAND (exp, 0);
3333 if (TREE_CODE_CLASS (code) == '<'
3334 || TREE_CODE_CLASS (code) == '1'
3335 || TREE_CODE_CLASS (code) == '2')
3336 type = TREE_TYPE (arg0);
3337 if (TREE_CODE_CLASS (code) == '2'
3338 || TREE_CODE_CLASS (code) == '<'
3339 || (TREE_CODE_CLASS (code) == 'e'
3340 && tree_code_length[(int) code] > 1))
3341 arg1 = TREE_OPERAND (exp, 1);
3342 }
3343
3344 /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
3345 lose a cast by accident. */
3346 if (type != NULL_TREE && orig_type == NULL_TREE)
3347 orig_type = type;
3348
3349 switch (code)
3350 {
3351 case TRUTH_NOT_EXPR:
3352 in_p = ! in_p, exp = arg0;
3353 continue;
3354
3355 case EQ_EXPR: case NE_EXPR:
3356 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3357 /* We can only do something if the range is testing for zero
3358 and if the second operand is an integer constant. Note that
3359 saying something is "in" the range we make is done by
3360 complementing IN_P since it will set in the initial case of
3361 being not equal to zero; "out" is leaving it alone. */
3362 if (low == 0 || high == 0
3363 || ! integer_zerop (low) || ! integer_zerop (high)
3364 || TREE_CODE (arg1) != INTEGER_CST)
3365 break;
3366
3367 switch (code)
3368 {
3369 case NE_EXPR: /* - [c, c] */
3370 low = high = arg1;
3371 break;
3372 case EQ_EXPR: /* + [c, c] */
3373 in_p = ! in_p, low = high = arg1;
3374 break;
3375 case GT_EXPR: /* - [-, c] */
3376 low = 0, high = arg1;
3377 break;
3378 case GE_EXPR: /* + [c, -] */
3379 in_p = ! in_p, low = arg1, high = 0;
3380 break;
3381 case LT_EXPR: /* - [c, -] */
3382 low = arg1, high = 0;
3383 break;
3384 case LE_EXPR: /* + [-, c] */
3385 in_p = ! in_p, low = 0, high = arg1;
3386 break;
3387 default:
3388 abort ();
3389 }
3390
3391 exp = arg0;
3392
3393 /* If this is an unsigned comparison, we also know that EXP is
3394 greater than or equal to zero. We base the range tests we make
3395 on that fact, so we record it here so we can parse existing
3396 range tests. */
3397 if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
3398 {
3399 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
3400 1, convert (type, integer_zero_node),
3401 NULL_TREE))
3402 break;
3403
3404 in_p = n_in_p, low = n_low, high = n_high;
3405
3406 /* If the high bound is missing, but we
3407 have a low bound, reverse the range so
3408 it goes from zero to the low bound minus 1. */
3409 if (high == 0 && low)
3410 {
3411 in_p = ! in_p;
3412 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3413 integer_one_node, 0);
3414 low = convert (type, integer_zero_node);
3415 }
3416 }
3417 continue;
3418
3419 case NEGATE_EXPR:
3420 /* (-x) IN [a,b] -> x in [-b, -a] */
3421 n_low = range_binop (MINUS_EXPR, type,
3422 convert (type, integer_zero_node), 0, high, 1);
3423 n_high = range_binop (MINUS_EXPR, type,
3424 convert (type, integer_zero_node), 0, low, 0);
3425 low = n_low, high = n_high;
3426 exp = arg0;
3427 continue;
3428
3429 case BIT_NOT_EXPR:
3430 /* ~ X -> -X - 1 */
3431 exp = build (MINUS_EXPR, type, negate_expr (arg0),
3432 convert (type, integer_one_node));
3433 continue;
3434
3435 case PLUS_EXPR: case MINUS_EXPR:
3436 if (TREE_CODE (arg1) != INTEGER_CST)
3437 break;
3438
3439 /* If EXP is signed, any overflow in the computation is undefined,
3440 so we don't worry about it so long as our computations on
3441 the bounds don't overflow. For unsigned, overflow is defined
3442 and this is exactly the right thing. */
3443 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3444 type, low, 0, arg1, 0);
3445 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3446 type, high, 1, arg1, 0);
3447 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3448 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3449 break;
3450
3451 /* Check for an unsigned range which has wrapped around the maximum
3452 value thus making n_high < n_low, and normalize it. */
3453 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3454 {
3455 low = range_binop (PLUS_EXPR, type, n_high, 0,
3456 integer_one_node, 0);
3457 high = range_binop (MINUS_EXPR, type, n_low, 0,
3458 integer_one_node, 0);
3459 in_p = ! in_p;
3460 }
3461 else
3462 low = n_low, high = n_high;
3463
3464 exp = arg0;
3465 continue;
3466
3467 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3468 if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
3469 break;
3470
3471 if (! INTEGRAL_TYPE_P (type)
3472 || (low != 0 && ! int_fits_type_p (low, type))
3473 || (high != 0 && ! int_fits_type_p (high, type)))
3474 break;
3475
3476 n_low = low, n_high = high;
3477
3478 if (n_low != 0)
3479 n_low = convert (type, n_low);
3480
3481 if (n_high != 0)
3482 n_high = convert (type, n_high);
3483
3484 /* If we're converting from an unsigned to a signed type,
3485 we will be doing the comparison as unsigned. The tests above
3486 have already verified that LOW and HIGH are both positive.
3487
3488 So we have to make sure that the original unsigned value will
3489 be interpreted as positive. */
3490 if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
3491 {
3492 tree equiv_type = type_for_mode (TYPE_MODE (type), 1);
3493 tree high_positive;
3494
3495 /* A range without an upper bound is, naturally, unbounded.
3496 Since convert would have cropped a very large value, use
3497 the max value for the destination type. */
3498 high_positive
3499 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3500 : TYPE_MAX_VALUE (type);
3501
3502 high_positive = fold (build (RSHIFT_EXPR, type,
3503 convert (type, high_positive),
3504 convert (type, integer_one_node)));
3505
3506 /* If the low bound is specified, "and" the range with the
3507 range for which the original unsigned value will be
3508 positive. */
3509 if (low != 0)
3510 {
3511 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3512 1, n_low, n_high,
3513 1, convert (type, integer_zero_node),
3514 high_positive))
3515 break;
3516
3517 in_p = (n_in_p == in_p);
3518 }
3519 else
3520 {
3521 /* Otherwise, "or" the range with the range of the input
3522 that will be interpreted as negative. */
3523 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3524 0, n_low, n_high,
3525 1, convert (type, integer_zero_node),
3526 high_positive))
3527 break;
3528
3529 in_p = (in_p != n_in_p);
3530 }
3531 }
3532
3533 exp = arg0;
3534 low = n_low, high = n_high;
3535 continue;
3536
3537 default:
3538 break;
3539 }
3540
3541 break;
3542 }
3543
3544 /* If EXP is a constant, we can evaluate whether this is true or false. */
3545 if (TREE_CODE (exp) == INTEGER_CST)
3546 {
3547 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3548 exp, 0, low, 0))
3549 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3550 exp, 1, high, 1)));
3551 low = high = 0;
3552 exp = 0;
3553 }
3554
3555 *pin_p = in_p, *plow = low, *phigh = high;
3556 return exp;
3557 }
3558 \f
3559 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3560 type, TYPE, return an expression to test if EXP is in (or out of, depending
3561 on IN_P) the range. */
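/* For example, testing whether a signed EXP is in [2, 5] goes through
   the MINUS_EXPR and unsigned-conversion cases below and becomes,
   roughly, (unsigned type) (EXP - 2) <= 3.  */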
3562
3563 static tree
3564 build_range_check (type, exp, in_p, low, high)
3565 tree type;
3566 tree exp;
3567 int in_p;
3568 tree low, high;
3569 {
3570 tree etype = TREE_TYPE (exp);
3571 tree utype, value;
3572
3573 if (! in_p
3574 && (0 != (value = build_range_check (type, exp, 1, low, high))))
3575 return invert_truthvalue (value);
3576
3577 else if (low == 0 && high == 0)
3578 return convert (type, integer_one_node);
3579
3580 else if (low == 0)
3581 return fold (build (LE_EXPR, type, exp, high));
3582
3583 else if (high == 0)
3584 return fold (build (GE_EXPR, type, exp, low));
3585
3586 else if (operand_equal_p (low, high, 0))
3587 return fold (build (EQ_EXPR, type, exp, low));
3588
3589 else if (TREE_UNSIGNED (etype) && integer_zerop (low))
3590 return build_range_check (type, exp, 1, 0, high);
3591
3592 else if (integer_zerop (low))
3593 {
3594 utype = unsigned_type (etype);
3595 return build_range_check (type, convert (utype, exp), 1, 0,
3596 convert (utype, high));
3597 }
3598
3599 else if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3600 && ! TREE_OVERFLOW (value))
3601 return build_range_check (type,
3602 fold (build (MINUS_EXPR, etype, exp, low)),
3603 1, convert (etype, integer_zero_node), value);
3604 else
3605 return 0;
3606 }
3607 \f
3608 /* Given two ranges, see if we can merge them into one. Return 1 if we
3609 can, 0 if we can't. Set the output range into the specified parameters. */
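/* For example, merging + [2, 8] with + [5, 9] (notation as above)
   yields the intersection + [5, 8], while merging + [2, 8] with
   - [5, 9] yields + [2, 4].  */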
3610
3611 static int
3612 merge_ranges (pin_p, plow, phigh, in0_p, low0, high0, in1_p, low1, high1)
3613 int *pin_p;
3614 tree *plow, *phigh;
3615 int in0_p, in1_p;
3616 tree low0, high0, low1, high1;
3617 {
3618 int no_overlap;
3619 int subset;
3620 int temp;
3621 tree tem;
3622 int in_p;
3623 tree low, high;
3624 int lowequal = ((low0 == 0 && low1 == 0)
3625 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3626 low0, 0, low1, 0)));
3627 int highequal = ((high0 == 0 && high1 == 0)
3628 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3629 high0, 1, high1, 1)));
3630
3631 /* Make range 0 be the range that starts first, or ends last if they
3632 start at the same value. Swap them if it isn't. */
3633 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3634 low0, 0, low1, 0))
3635 || (lowequal
3636 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3637 high1, 1, high0, 1))))
3638 {
3639 temp = in0_p, in0_p = in1_p, in1_p = temp;
3640 tem = low0, low0 = low1, low1 = tem;
3641 tem = high0, high0 = high1, high1 = tem;
3642 }
3643
3644 /* Now flag two cases, whether the ranges are disjoint or whether the
3645 second range is totally subsumed in the first. Note that the tests
3646 below are simplified by the ones above. */
3647 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3648 high0, 1, low1, 0));
3649 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3650 high1, 1, high0, 1));
3651
3652 /* We now have four cases, depending on whether we are including or
3653 excluding the two ranges. */
3654 if (in0_p && in1_p)
3655 {
3656 /* If they don't overlap, the result is false. If the second range
3657 is a subset it is the result. Otherwise, the range is from the start
3658 of the second to the end of the first. */
3659 if (no_overlap)
3660 in_p = 0, low = high = 0;
3661 else if (subset)
3662 in_p = 1, low = low1, high = high1;
3663 else
3664 in_p = 1, low = low1, high = high0;
3665 }
3666
3667 else if (in0_p && ! in1_p)
3668 {
3669 /* If they don't overlap, the result is the first range. If they are
3670 equal, the result is false. If the second range is a subset of the
3671 first, and the ranges begin at the same place, we go from just after
3672 the end of the first range to the end of the second. If the second
3673 range is not a subset of the first, or if it is a subset and both
3674 ranges end at the same place, the range starts at the start of the
3675 first range and ends just before the second range.
3676 Otherwise, we can't describe this as a single range. */
3677 if (no_overlap)
3678 in_p = 1, low = low0, high = high0;
3679 else if (lowequal && highequal)
3680 in_p = 0, low = high = 0;
3681 else if (subset && lowequal)
3682 {
3683 in_p = 1, high = high0;
3684 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3685 integer_one_node, 0);
3686 }
3687 else if (! subset || highequal)
3688 {
3689 in_p = 1, low = low0;
3690 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3691 integer_one_node, 0);
3692 }
3693 else
3694 return 0;
3695 }
3696
3697 else if (! in0_p && in1_p)
3698 {
3699 /* If they don't overlap, the result is the second range. If the second
3700 is a subset of the first, the result is false. Otherwise,
3701 the range starts just after the first range and ends at the
3702 end of the second. */
3703 if (no_overlap)
3704 in_p = 1, low = low1, high = high1;
3705 else if (subset || highequal)
3706 in_p = 0, low = high = 0;
3707 else
3708 {
3709 in_p = 1, high = high1;
3710 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3711 integer_one_node, 0);
3712 }
3713 }
3714
3715 else
3716 {
3717 /* The case where we are excluding both ranges. Here the complex case
3718 is if they don't overlap. In that case, the only time we have a
3719 range is if they are adjacent. If the second is a subset of the
3720 first, the result is the first. Otherwise, the range to exclude
3721 starts at the beginning of the first range and ends at the end of the
3722 second. */
3723 if (no_overlap)
3724 {
3725 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3726 range_binop (PLUS_EXPR, NULL_TREE,
3727 high0, 1,
3728 integer_one_node, 1),
3729 1, low1, 0)))
3730 in_p = 0, low = low0, high = high1;
3731 else
3732 return 0;
3733 }
3734 else if (subset)
3735 in_p = 0, low = low0, high = high0;
3736 else
3737 in_p = 0, low = low0, high = high1;
3738 }
3739
3740 *pin_p = in_p, *plow = low, *phigh = high;
3741 return 1;
3742 }
3743 \f
3744 /* EXP is some logical combination of boolean tests. See if we can
3745 merge it into some range test. Return the new tree if so. */
3746
3747 static tree
3748 fold_range_test (exp)
3749 tree exp;
3750 {
3751 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3752 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3753 int in0_p, in1_p, in_p;
3754 tree low0, low1, low, high0, high1, high;
3755 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3756 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3757 tree tem;
3758
3759 /* If this is an OR operation, invert both sides; we will invert
3760 again at the end. */
3761 if (or_op)
3762 in0_p = ! in0_p, in1_p = ! in1_p;
3763
3764 /* If both expressions are the same, if we can merge the ranges, and we
3765 can build the range test, return it or its inversion. If one of the
3766 ranges is always true or always false, consider it to be the same
3767 expression as the other. */
3768 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3769 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3770 in1_p, low1, high1)
3771 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3772 lhs != 0 ? lhs
3773 : rhs != 0 ? rhs : integer_zero_node,
3774 in_p, low, high))))
3775 return or_op ? invert_truthvalue (tem) : tem;
3776
3777 /* On machines where branches are expensive, if this is a
3778 short-circuited branch and the underlying object on both sides
3779 is the same, make a non-short-circuit operation. */
3780 else if (BRANCH_COST >= 2
3781 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3782 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3783 && operand_equal_p (lhs, rhs, 0))
3784 {
3785 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3786 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
3787 which cases we can't do this. */
3788 if (simple_operand_p (lhs))
3789 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3790 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3791 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3792 TREE_OPERAND (exp, 1));
3793
3794 else if (global_bindings_p () == 0
3795 && ! contains_placeholder_p (lhs))
3796 {
3797 tree common = save_expr (lhs);
3798
3799 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3800 or_op ? ! in0_p : in0_p,
3801 low0, high0))
3802 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3803 or_op ? ! in1_p : in1_p,
3804 low1, high1))))
3805 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3806 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3807 TREE_TYPE (exp), lhs, rhs);
3808 }
3809 }
3810
3811 return 0;
3812 }
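/* For example (a sketch): the two tests in `ch >= '0' && ch <= '9'' merge
   into the single range [48, 57], which build_range_check (defined earlier
   in this file) can express as one unsigned comparison, conceptually
   `(unsigned char) (ch - 48) <= 9', so the two branches collapse into
   one.  */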
3813 \f
3814 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3815 bit value. Arrange things so the extra bits will be set to zero if and
3816 only if C is sign-extended to its full width. If MASK is nonzero,
3817 it is an INTEGER_CST that should be AND'ed with the extra bits. */
3818
3819 static tree
3820 unextend (c, p, unsignedp, mask)
3821 tree c;
3822 int p;
3823 int unsignedp;
3824 tree mask;
3825 {
3826 tree type = TREE_TYPE (c);
3827 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3828 tree temp;
3829
3830 if (p == modesize || unsignedp)
3831 return c;
3832
3833 /* We work by getting just the sign bit into the low-order bit, then
3834 into the high-order bit, then sign-extend. We then XOR that value
3835 with C. */
3836 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3837 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3838
3839 /* We must use a signed type in order to get an arithmetic right shift.
3840 However, we must also avoid introducing accidental overflows, so that
3841 a subsequent call to integer_zerop will work. Hence we must
3842 do the type conversion here. At this point, the constant is either
3843 zero or one, and the conversion to a signed type can never overflow.
3844 We could get an overflow if this conversion is done anywhere else. */
3845 if (TREE_UNSIGNED (type))
3846 temp = convert (signed_type (type), temp);
3847
3848 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3849 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3850 if (mask != 0)
3851 temp = const_binop (BIT_AND_EXPR, temp, convert (TREE_TYPE (c), mask), 0);
3852 /* If necessary, convert the type back to match the type of C. */
3853 if (TREE_UNSIGNED (type))
3854 temp = convert (type, temp);
3855
3856 return convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
3857 }
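/* A worked example (a sketch, with MASK zero, P == 4 and an 8-bit mode):
   for C == 0b00001010, whose 4-bit sign bit is set, TEMP ends up as
   0b11110000 and the result is C ^ TEMP == 0b11111010, i.e. C
   sign-extended from 4 bits.  Conversely, if C arrives already
   sign-extended as 0b11111010, the XOR clears the extra bits and yields
   0b00001010.  */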
3858 \f
3859 /* Find ways of folding logical expressions of LHS and RHS:
3860 Try to merge two comparisons to the same innermost item.
3861 Look for range tests like "ch >= '0' && ch <= '9'".
3862 Look for combinations of simple terms on machines with expensive branches
3863 and evaluate the RHS unconditionally.
3864
3865 For example, if we have p->a == 2 && p->b == 4 and we can make an
3866 object large enough to span both A and B, we can do this with a comparison
3867 against the object ANDed with the a mask.
3868
3869 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3870 operations to do this with one comparison.
3871
3872 We check for both normal comparisons and the BIT_AND_EXPRs made by this
3873 function and the one above.
3874
3875 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3876 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3877
3878 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
3879 two operands.
3880
3881 We return the simplified tree or 0 if no optimization is possible. */
3882
3883 static tree
3884 fold_truthop (code, truth_type, lhs, rhs)
3885 enum tree_code code;
3886 tree truth_type, lhs, rhs;
3887 {
3888 /* If this is the "or" of two comparisons, we can do something if
3889 the comparisons are NE_EXPR. If this is the "and", we can do something
3890 if the comparisons are EQ_EXPR. I.e.,
3891 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3892
3893 WANTED_CODE is this operation code. For single bit fields, we can
3894 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3895 comparison for one-bit fields. */
3896
3897 enum tree_code wanted_code;
3898 enum tree_code lcode, rcode;
3899 tree ll_arg, lr_arg, rl_arg, rr_arg;
3900 tree ll_inner, lr_inner, rl_inner, rr_inner;
3901 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3902 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3903 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3904 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3905 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3906 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3907 enum machine_mode lnmode, rnmode;
3908 tree ll_mask, lr_mask, rl_mask, rr_mask;
3909 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3910 tree l_const, r_const;
3911 tree lntype, rntype, result;
3912 int first_bit, end_bit;
3913 int volatilep;
3914
3915 /* Start by getting the comparison codes. Fail if anything is volatile.
3916 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3917 it were surrounded with a NE_EXPR. */
3918
3919 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3920 return 0;
3921
3922 lcode = TREE_CODE (lhs);
3923 rcode = TREE_CODE (rhs);
3924
3925 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3926 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3927
3928 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3929 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3930
3931 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3932 return 0;
3933
3934 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3935 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3936
3937 ll_arg = TREE_OPERAND (lhs, 0);
3938 lr_arg = TREE_OPERAND (lhs, 1);
3939 rl_arg = TREE_OPERAND (rhs, 0);
3940 rr_arg = TREE_OPERAND (rhs, 1);
3941
3942 /* If the RHS can be evaluated unconditionally and its operands are
3943 simple, it wins to evaluate the RHS unconditionally on machines
3944 with expensive branches. In this case, this isn't a comparison
3945 that can be merged. Avoid doing this if the RHS is a floating-point
3946 comparison since those can trap. */
3947
3948 if (BRANCH_COST >= 2
3949 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
3950 && simple_operand_p (rl_arg)
3951 && simple_operand_p (rr_arg))
3952 return build (code, truth_type, lhs, rhs);
3953
3954 /* See if the comparisons can be merged. Then get all the parameters for
3955 each side. */
3956
3957 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
3958 || (rcode != EQ_EXPR && rcode != NE_EXPR))
3959 return 0;
3960
3961 volatilep = 0;
3962 ll_inner = decode_field_reference (ll_arg,
3963 &ll_bitsize, &ll_bitpos, &ll_mode,
3964 &ll_unsignedp, &volatilep, &ll_mask,
3965 &ll_and_mask);
3966 lr_inner = decode_field_reference (lr_arg,
3967 &lr_bitsize, &lr_bitpos, &lr_mode,
3968 &lr_unsignedp, &volatilep, &lr_mask,
3969 &lr_and_mask);
3970 rl_inner = decode_field_reference (rl_arg,
3971 &rl_bitsize, &rl_bitpos, &rl_mode,
3972 &rl_unsignedp, &volatilep, &rl_mask,
3973 &rl_and_mask);
3974 rr_inner = decode_field_reference (rr_arg,
3975 &rr_bitsize, &rr_bitpos, &rr_mode,
3976 &rr_unsignedp, &volatilep, &rr_mask,
3977 &rr_and_mask);
3978
3979 /* The inner operation on the lhs of each
3980 comparison must be the same if we are to be able to do anything.
3981 Then see if we have constants. If not, the same must be true for
3982 the rhs's. */
3983 if (volatilep || ll_inner == 0 || rl_inner == 0
3984 || ! operand_equal_p (ll_inner, rl_inner, 0))
3985 return 0;
3986
3987 if (TREE_CODE (lr_arg) == INTEGER_CST
3988 && TREE_CODE (rr_arg) == INTEGER_CST)
3989 l_const = lr_arg, r_const = rr_arg;
3990 else if (lr_inner == 0 || rr_inner == 0
3991 || ! operand_equal_p (lr_inner, rr_inner, 0))
3992 return 0;
3993 else
3994 l_const = r_const = 0;
3995
3996 /* If either comparison code is not correct for our logical operation,
3997 fail. However, we can convert a one-bit comparison against zero into
3998 the opposite comparison against that bit being set in the field. */
3999
4000 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4001 if (lcode != wanted_code)
4002 {
4003 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4004 {
4005 /* Make the left operand unsigned, since we are only interested
4006 in the value of one bit. Otherwise we are doing the wrong
4007 thing below. */
4008 ll_unsignedp = 1;
4009 l_const = ll_mask;
4010 }
4011 else
4012 return 0;
4013 }
4014
4015 /* This is analogous to the code for l_const above. */
4016 if (rcode != wanted_code)
4017 {
4018 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4019 {
4020 rl_unsignedp = 1;
4021 r_const = rl_mask;
4022 }
4023 else
4024 return 0;
4025 }
4026
4027 /* See if we can find a mode that contains both fields being compared on
4028 the left. If we can't, fail. Otherwise, update all constants and masks
4029 to be relative to a field of that size. */
4030 first_bit = MIN (ll_bitpos, rl_bitpos);
4031 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4032 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4033 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4034 volatilep);
4035 if (lnmode == VOIDmode)
4036 return 0;
4037
4038 lnbitsize = GET_MODE_BITSIZE (lnmode);
4039 lnbitpos = first_bit & ~ (lnbitsize - 1);
4040 lntype = type_for_size (lnbitsize, 1);
4041 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4042
4043 if (BYTES_BIG_ENDIAN)
4044 {
4045 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4046 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4047 }
4048
4049 ll_mask = const_binop (LSHIFT_EXPR, convert (lntype, ll_mask),
4050 size_int (xll_bitpos), 0);
4051 rl_mask = const_binop (LSHIFT_EXPR, convert (lntype, rl_mask),
4052 size_int (xrl_bitpos), 0);
4053
4054 if (l_const)
4055 {
4056 l_const = convert (lntype, l_const);
4057 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4058 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4059 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4060 fold (build1 (BIT_NOT_EXPR,
4061 lntype, ll_mask)),
4062 0)))
4063 {
4064 warning ("comparison is always %d", wanted_code == NE_EXPR);
4065
4066 return convert (truth_type,
4067 wanted_code == NE_EXPR
4068 ? integer_one_node : integer_zero_node);
4069 }
4070 }
4071 if (r_const)
4072 {
4073 r_const = convert (lntype, r_const);
4074 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4075 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4076 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4077 fold (build1 (BIT_NOT_EXPR,
4078 lntype, rl_mask)),
4079 0)))
4080 {
4081 warning ("comparison is always %d", wanted_code == NE_EXPR);
4082
4083 return convert (truth_type,
4084 wanted_code == NE_EXPR
4085 ? integer_one_node : integer_zero_node);
4086 }
4087 }
4088
4089 /* If the right sides are not constant, do the same for them. Also,
4090 disallow this optimization if a size or signedness mismatch occurs
4091 between the left and right sides. */
4092 if (l_const == 0)
4093 {
4094 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4095 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4096 /* Make sure the two fields on the right
4097 correspond to the left without being swapped. */
4098 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4099 return 0;
4100
4101 first_bit = MIN (lr_bitpos, rr_bitpos);
4102 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4103 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4104 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4105 volatilep);
4106 if (rnmode == VOIDmode)
4107 return 0;
4108
4109 rnbitsize = GET_MODE_BITSIZE (rnmode);
4110 rnbitpos = first_bit & ~ (rnbitsize - 1);
4111 rntype = type_for_size (rnbitsize, 1);
4112 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4113
4114 if (BYTES_BIG_ENDIAN)
4115 {
4116 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4117 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4118 }
4119
4120 lr_mask = const_binop (LSHIFT_EXPR, convert (rntype, lr_mask),
4121 size_int (xlr_bitpos), 0);
4122 rr_mask = const_binop (LSHIFT_EXPR, convert (rntype, rr_mask),
4123 size_int (xrr_bitpos), 0);
4124
4125 /* Make a mask that corresponds to both fields being compared.
4126 Do this for both items being compared. If the operands are the
4127 same size and the bits being compared are in the same position
4128 then we can do this by masking both and comparing the masked
4129 results. */
4130 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4131 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4132 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4133 {
4134 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4135 ll_unsignedp || rl_unsignedp);
4136 if (! all_ones_mask_p (ll_mask, lnbitsize))
4137 lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
4138
4139 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4140 lr_unsignedp || rr_unsignedp);
4141 if (! all_ones_mask_p (lr_mask, rnbitsize))
4142 rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
4143
4144 return build (wanted_code, truth_type, lhs, rhs);
4145 }
4146
4147 /* There is still another way we can do something: If both pairs of
4148 fields being compared are adjacent, we may be able to make a wider
4149 field containing them both.
4150
4151 Note that we still must mask the lhs/rhs expressions. Furthermore,
4152 the mask must be shifted to account for the shift done by
4153 make_bit_field_ref. */
4154 if ((ll_bitsize + ll_bitpos == rl_bitpos
4155 && lr_bitsize + lr_bitpos == rr_bitpos)
4156 || (ll_bitpos == rl_bitpos + rl_bitsize
4157 && lr_bitpos == rr_bitpos + rr_bitsize))
4158 {
4159 tree type;
4160
4161 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4162 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4163 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4164 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4165
4166 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4167 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4168 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4169 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4170
4171 /* Convert to the smaller type before masking out unwanted bits. */
4172 type = lntype;
4173 if (lntype != rntype)
4174 {
4175 if (lnbitsize > rnbitsize)
4176 {
4177 lhs = convert (rntype, lhs);
4178 ll_mask = convert (rntype, ll_mask);
4179 type = rntype;
4180 }
4181 else if (lnbitsize < rnbitsize)
4182 {
4183 rhs = convert (lntype, rhs);
4184 lr_mask = convert (lntype, lr_mask);
4185 type = lntype;
4186 }
4187 }
4188
4189 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4190 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
4191
4192 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4193 rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
4194
4195 return build (wanted_code, truth_type, lhs, rhs);
4196 }
4197
4198 return 0;
4199 }
4200
4201 /* Handle the case of comparisons with constants. If there is something in
4202 common between the masks, those bits of the constants must be the same.
4203 If not, the condition is always false. Test for this to avoid generating
4204 incorrect code below. */
4205 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4206 if (! integer_zerop (result)
4207 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4208 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4209 {
4210 if (wanted_code == NE_EXPR)
4211 {
4212 warning ("`or' of unmatched not-equal tests is always 1");
4213 return convert (truth_type, integer_one_node);
4214 }
4215 else
4216 {
4217 warning ("`and' of mutually exclusive equal-tests is always 0");
4218 return convert (truth_type, integer_zero_node);
4219 }
4220 }
4221
4222 /* Construct the expression we will return. First get the component
4223 reference we will make. Unless the mask is all ones the width of
4224 that field, perform the mask operation. Then compare with the
4225 merged constant. */
4226 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4227 ll_unsignedp || rl_unsignedp);
4228
4229 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4230 if (! all_ones_mask_p (ll_mask, lnbitsize))
4231 result = build (BIT_AND_EXPR, lntype, result, ll_mask);
4232
4233 return build (wanted_code, truth_type, result,
4234 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4235 }
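/* For instance (a sketch assuming a little-endian target with adjacent
   unsigned 8-bit fields a and b and no padding): `p->a == 2 && p->b == 4'
   becomes a single 16-bit load of the word spanning both fields compared
   against the merged constant (4 << 8) | 2 == 0x0402, with a BIT_AND_EXPR
   mask applied first unless the two fields fill the word entirely.  */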
4236 \f
4237 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4238 constant. */
4239
4240 static tree
4241 optimize_minmax_comparison (t)
4242 tree t;
4243 {
4244 tree type = TREE_TYPE (t);
4245 tree arg0 = TREE_OPERAND (t, 0);
4246 enum tree_code op_code;
4247 tree comp_const = TREE_OPERAND (t, 1);
4248 tree minmax_const;
4249 int consts_equal, consts_lt;
4250 tree inner;
4251
4252 STRIP_SIGN_NOPS (arg0);
4253
4254 op_code = TREE_CODE (arg0);
4255 minmax_const = TREE_OPERAND (arg0, 1);
4256 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4257 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4258 inner = TREE_OPERAND (arg0, 0);
4259
4260 /* If something does not permit us to optimize, return the original tree. */
4261 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4262 || TREE_CODE (comp_const) != INTEGER_CST
4263 || TREE_CONSTANT_OVERFLOW (comp_const)
4264 || TREE_CODE (minmax_const) != INTEGER_CST
4265 || TREE_CONSTANT_OVERFLOW (minmax_const))
4266 return t;
4267
4268 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4269 and GT_EXPR, doing the rest with recursive calls using logical
4270 simplifications. */
4271 switch (TREE_CODE (t))
4272 {
4273 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4274 return
4275 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4276
4277 case GE_EXPR:
4278 return
4279 fold (build (TRUTH_ORIF_EXPR, type,
4280 optimize_minmax_comparison
4281 (build (EQ_EXPR, type, arg0, comp_const)),
4282 optimize_minmax_comparison
4283 (build (GT_EXPR, type, arg0, comp_const))));
4284
4285 case EQ_EXPR:
4286 if (op_code == MAX_EXPR && consts_equal)
4287 /* MAX (X, 0) == 0 -> X <= 0 */
4288 return fold (build (LE_EXPR, type, inner, comp_const));
4289
4290 else if (op_code == MAX_EXPR && consts_lt)
4291 /* MAX (X, 0) == 5 -> X == 5 */
4292 return fold (build (EQ_EXPR, type, inner, comp_const));
4293
4294 else if (op_code == MAX_EXPR)
4295 /* MAX (X, 0) == -1 -> false */
4296 return omit_one_operand (type, integer_zero_node, inner);
4297
4298 else if (consts_equal)
4299 /* MIN (X, 0) == 0 -> X >= 0 */
4300 return fold (build (GE_EXPR, type, inner, comp_const));
4301
4302 else if (consts_lt)
4303 /* MIN (X, 0) == 5 -> false */
4304 return omit_one_operand (type, integer_zero_node, inner);
4305
4306 else
4307 /* MIN (X, 0) == -1 -> X == -1 */
4308 return fold (build (EQ_EXPR, type, inner, comp_const));
4309
4310 case GT_EXPR:
4311 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4312 /* MAX (X, 0) > 0 -> X > 0
4313 MAX (X, 0) > 5 -> X > 5 */
4314 return fold (build (GT_EXPR, type, inner, comp_const));
4315
4316 else if (op_code == MAX_EXPR)
4317 /* MAX (X, 0) > -1 -> true */
4318 return omit_one_operand (type, integer_one_node, inner);
4319
4320 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4321 /* MIN (X, 0) > 0 -> false
4322 MIN (X, 0) > 5 -> false */
4323 return omit_one_operand (type, integer_zero_node, inner);
4324
4325 else
4326 /* MIN (X, 0) > -1 -> X > -1 */
4327 return fold (build (GT_EXPR, type, inner, comp_const));
4328
4329 default:
4330 return t;
4331 }
4332 }
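/* Example of the GT_EXPR case (a sketch): for `MAX (X, 4) > 3' we have
   consts_equal == 0 and consts_lt == 0, so the second arm applies and the
   comparison folds to the constant 1; this is correct because
   MAX (X, 4) >= 4 > 3 for every X.  */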
4333 \f
4334 /* T is an integer expression that is being multiplied or divided by, or
4335 taken modulo, a constant C (CODE says which operation and what kind of
4336 divide or modulus). See if we can eliminate that operation by folding it with
4337 other operations already in T. WIDE_TYPE, if non-null, is a type that
4338 should be used for the computation if wider than our type.
4339
4340 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4341 (X * 2) + (Y * 4). We must, however, be assured that either the original
4342 expression would not overflow or that overflow is undefined for the type
4343 in the language in question.
4344
4345 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4346 the machine has a multiply-accumulate insn or that this is part of an
4347 addressing calculation.
4348
4349 If we return a non-null expression, it is an equivalent form of the
4350 original computation, but need not be in the original type. */
4351
4352 static tree
4353 extract_muldiv (t, c, code, wide_type)
4354 tree t;
4355 tree c;
4356 enum tree_code code;
4357 tree wide_type;
4358 {
4359 tree type = TREE_TYPE (t);
4360 enum tree_code tcode = TREE_CODE (t);
4361 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
4362 > GET_MODE_SIZE (TYPE_MODE (type)))
4363 ? wide_type : type);
4364 tree t1, t2;
4365 int same_p = tcode == code;
4366 tree op0 = NULL_TREE, op1 = NULL_TREE;
4367
4368 /* Don't deal with constants of zero here; they confuse the code below. */
4369 if (integer_zerop (c))
4370 return NULL_TREE;
4371
4372 if (TREE_CODE_CLASS (tcode) == '1')
4373 op0 = TREE_OPERAND (t, 0);
4374
4375 if (TREE_CODE_CLASS (tcode) == '2')
4376 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
4377
4378 /* Note that we need not handle conditional operations here since fold
4379 already handles those cases. So just do arithmetic here. */
4380 switch (tcode)
4381 {
4382 case INTEGER_CST:
4383 /* For a constant, we can always simplify if we are a multiply
4384 or (for divide and modulus) if it is a multiple of our constant. */
4385 if (code == MULT_EXPR
4386 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
4387 return const_binop (code, convert (ctype, t), convert (ctype, c), 0);
4388 break;
4389
4390 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
4391 /* Pass the constant down and see if we can make a simplification. If
4392 we can, replace this expression with the inner simplification for
4393 possible later conversion to our or some other type. */
4394 if (0 != (t1 = extract_muldiv (op0, convert (TREE_TYPE (op0), c), code,
4395 code == MULT_EXPR ? ctype : NULL_TREE)))
4396 return t1;
4397 break;
4398
4399 case NEGATE_EXPR: case ABS_EXPR:
4400 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4401 return fold (build1 (tcode, ctype, convert (ctype, t1)));
4402 break;
4403
4404 case MIN_EXPR: case MAX_EXPR:
4405 /* If widening the type changes the signedness, then we can't perform
4406 this optimization as that changes the result. */
4407 if (ctype != type && TREE_UNSIGNED (ctype) != TREE_UNSIGNED (type))
4408 break;
4409
4410 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
4411 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
4412 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
4413 {
4414 if (tree_int_cst_sgn (c) < 0)
4415 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
4416
4417 return fold (build (tcode, ctype, convert (ctype, t1),
4418 convert (ctype, t2)));
4419 }
4420 break;
4421
4422 case WITH_RECORD_EXPR:
4423 if ((t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code, wide_type)) != 0)
4424 return build (WITH_RECORD_EXPR, TREE_TYPE (t1), t1,
4425 TREE_OPERAND (t, 1));
4426 break;
4427
4428 case SAVE_EXPR:
4429 /* If this has not been evaluated and the operand has no side effects,
4430 we can see if we can do something inside it and make a new one.
4431 Note that this test is overly conservative since we can do this
4432 if the only reason it had side effects is that it was another
4433 similar SAVE_EXPR, but that isn't worth bothering with. */
4434 if (SAVE_EXPR_RTL (t) == 0 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0))
4435 && 0 != (t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code,
4436 wide_type)))
4437 return save_expr (t1);
4438 break;
4439
4440 case LSHIFT_EXPR: case RSHIFT_EXPR:
4441 /* If the second operand is constant, this is a multiplication
4442 or floor division by a power of two, so we can treat it that
4443 way unless the multiplier or divisor overflows. */
4444 if (TREE_CODE (op1) == INTEGER_CST
4445 && 0 != (t1 = convert (ctype,
4446 const_binop (LSHIFT_EXPR, size_one_node,
4447 op1, 0)))
4448 && ! TREE_OVERFLOW (t1))
4449 return extract_muldiv (build (tcode == LSHIFT_EXPR
4450 ? MULT_EXPR : FLOOR_DIV_EXPR,
4451 ctype, convert (ctype, op0), t1),
4452 c, code, wide_type);
4453 break;
4454
4455 case PLUS_EXPR: case MINUS_EXPR:
4456 /* See if we can eliminate the operation on both sides. If we can, we
4457 can return a new PLUS or MINUS. If we can't, the only remaining
4458 cases where we can do anything are if the second operand is a
4459 constant. */
4460 t1 = extract_muldiv (op0, c, code, wide_type);
4461 t2 = extract_muldiv (op1, c, code, wide_type);
4462 if (t1 != 0 && t2 != 0)
4463 return fold (build (tcode, ctype, convert (ctype, t1),
4464 convert (ctype, t2)));
4465
4466 /* If this was a subtraction, negate OP1 and set it to be an addition.
4467 This simplifies the logic below. */
4468 if (tcode == MINUS_EXPR)
4469 tcode = PLUS_EXPR, op1 = negate_expr (op1);
4470
4471 if (TREE_CODE (op1) != INTEGER_CST)
4472 break;
4473
4474 /* If either OP1 or C is negative, this optimization is not safe for
4475 some of the division and remainder types while for others we need
4476 to change the code. */
4477 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
4478 {
4479 if (code == CEIL_DIV_EXPR)
4480 code = FLOOR_DIV_EXPR;
4481 else if (code == CEIL_MOD_EXPR)
4482 code = FLOOR_MOD_EXPR;
4483 else if (code == FLOOR_DIV_EXPR)
4484 code = CEIL_DIV_EXPR;
4485 else if (code == FLOOR_MOD_EXPR)
4486 code = CEIL_MOD_EXPR;
4487 else if (code != MULT_EXPR)
4488 break;
4489 }
4490
4491 /* Now do the operation and verify it doesn't overflow. */
4492 op1 = const_binop (code, convert (ctype, op1), convert (ctype, c), 0);
4493 if (op1 == 0 || TREE_OVERFLOW (op1))
4494 break;
4495
4496 /* If we have an unsigned type that is not a sizetype, we cannot widen
4497 the operation since it will change the result if the original
4498 computation overflowed. */
4499 if (TREE_UNSIGNED (ctype)
4500 && ! TYPE_IS_SIZETYPE (ctype)
4501 && ctype != type)
4502 break;
4503
4504 /* If we were able to eliminate our operation from the first side,
4505 apply our operation to the second side and reform the PLUS. */
4506 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4507 return fold (build (tcode, ctype, convert (ctype, t1), op1));
4508
4509 /* The last case is if we are a multiply. In that case, we can
4510 apply the distributive law to commute the multiply and addition
4511 if the multiplication of the constants doesn't overflow. */
4512 if (code == MULT_EXPR)
4513 return fold (build (tcode, ctype, fold (build (code, ctype,
4514 convert (ctype, op0),
4515 convert (ctype, c))),
4516 op1));
4517
4518 break;
4519
4520 case MULT_EXPR:
4521 /* We have a special case here if we are doing something like
4522 (X * 8) % 4 since we know that's zero. */
4523 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4524 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4525 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4526 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4527 return omit_one_operand (type, integer_zero_node, op0);
4528
4529 /* ... fall through ... */
4530
4531 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
4532 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
4533 /* If we can extract our operation from the LHS, do so and return a
4534 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
4535 do something only if the second operand is a constant. */
4536 if (same_p
4537 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4538 return fold (build (tcode, ctype, convert (ctype, t1),
4539 convert (ctype, op1)));
4540 else if (tcode == MULT_EXPR && code == MULT_EXPR
4541 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4542 return fold (build (tcode, ctype, convert (ctype, op0),
4543 convert (ctype, t1)));
4544 else if (TREE_CODE (op1) != INTEGER_CST)
4545 return 0;
4546
4547 /* If these are the same operation types, we can associate them
4548 assuming no overflow. */
4549 if (tcode == code
4550 && 0 != (t1 = const_binop (MULT_EXPR, convert (ctype, op1),
4551 convert (ctype, c), 0))
4552 && ! TREE_OVERFLOW (t1))
4553 return fold (build (tcode, ctype, convert (ctype, op0), t1));
4554
4555 /* If these operations "cancel" each other, we have the main
4556 optimizations of this pass, which occur when either constant is a
4557 multiple of the other, in which case we replace this with either an
4558 operation of CODE or TCODE.
4559
4560 If we have an unsigned type that is not a sizetype, we cannot do
4561 this since it will change the result if the original computation
4562 overflowed. */
4563 if ((! TREE_UNSIGNED (ctype)
4564 || (TREE_CODE (ctype) == INTEGER_TYPE
4565 && TYPE_IS_SIZETYPE (ctype)))
4566 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4567 || (tcode == MULT_EXPR
4568 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4569 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
4570 {
4571 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4572 return fold (build (tcode, ctype, convert (ctype, op0),
4573 convert (ctype,
4574 const_binop (TRUNC_DIV_EXPR,
4575 op1, c, 0))));
4576 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4577 return fold (build (code, ctype, convert (ctype, op0),
4578 convert (ctype,
4579 const_binop (TRUNC_DIV_EXPR,
4580 c, op1, 0))));
4581 }
4582 break;
4583
4584 default:
4585 break;
4586 }
4587
4588 return 0;
4589 }
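/* A worked example (a sketch, valid when the original computation cannot
   overflow or overflow is undefined): for T = (X * 8) + 24, C = 4 and
   CODE = TRUNC_DIV_EXPR, the PLUS_EXPR case recurses into both addends;
   the MULT_EXPR case cancels 8 against 4 giving X * 2, the INTEGER_CST
   case yields 6, and the sum is reassembled, so (X * 8 + 24) / 4 becomes
   X * 2 + 6.  */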
4590 \f
4591 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4592 S, a SAVE_EXPR, return the expression actually being evaluated. Note
4593 that we may sometimes modify the tree. */
4594
4595 static tree
4596 strip_compound_expr (t, s)
4597 tree t;
4598 tree s;
4599 {
4600 enum tree_code code = TREE_CODE (t);
4601
4602 /* See if this is the COMPOUND_EXPR we want to eliminate. */
4603 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4604 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4605 return TREE_OPERAND (t, 1);
4606
4607 /* See if this is a COND_EXPR or a simple arithmetic operator. We
4608 don't bother handling any other types. */
4609 else if (code == COND_EXPR)
4610 {
4611 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4612 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4613 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
4614 }
4615 else if (TREE_CODE_CLASS (code) == '1')
4616 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4617 else if (TREE_CODE_CLASS (code) == '<'
4618 || TREE_CODE_CLASS (code) == '2')
4619 {
4620 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4621 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4622 }
4623
4624 return t;
4625 }
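/* For example (a sketch): given T = `((void) SAVE_EXPR <a>, b)' and
   S = SAVE_EXPR <a>, the first test matches the CONVERT_EXPR wrapper and
   we return just `b'; the caller in fold below then re-emits the
   SAVE_EXPR once, ahead of the whole expression.  */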
4626 \f
4627 /* Return a node which has the indicated constant VALUE (either 0 or
4628 1), and is of the indicated TYPE. */
4629
4630 static tree
4631 constant_boolean_node (value, type)
4632 int value;
4633 tree type;
4634 {
4635 if (type == integer_type_node)
4636 return value ? integer_one_node : integer_zero_node;
4637 else if (TREE_CODE (type) == BOOLEAN_TYPE)
4638 return truthvalue_conversion (value ? integer_one_node :
4639 integer_zero_node);
4640 else
4641 {
4642 tree t = build_int_2 (value, 0);
4643
4644 TREE_TYPE (t) = type;
4645 return t;
4646 }
4647 }
4648
4649 /* Utility function for the following routine, to see how complex a nesting of
4650 COND_EXPRs can be. EXPR is the expression and LIM is a count beyond which
4651 we don't care (to avoid spending too much time on complex expressions). */
4652
4653 static int
4654 count_cond (expr, lim)
4655 tree expr;
4656 int lim;
4657 {
4658 int ctrue, cfalse;
4659
4660 if (TREE_CODE (expr) != COND_EXPR)
4661 return 0;
4662 else if (lim <= 0)
4663 return 0;
4664
4665 ctrue = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4666 cfalse = count_cond (TREE_OPERAND (expr, 2), lim - 1 - ctrue);
4667 return MIN (lim, 1 + ctrue + cfalse);
4668 }
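/* For example (a sketch): for `a ? (b ? x : y) : z' where x, y and z are
   not conditionals and LIM is large, the inner COND_EXPR counts 1 and the
   outer one counts 1 + 1 + 0 == 2.  */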
4669 \f
4670 /* Perform constant folding and related simplification of EXPR.
4671 The related simplifications include x*1 => x, x*0 => 0, etc.,
4672 and application of the associative law.
4673 NOP_EXPR conversions may be removed freely (as long as we
4674 are careful not to change the C type of the overall expression).
4675 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
4676 but we can constant-fold them if they have constant operands. */
4677
4678 tree
4679 fold (expr)
4680 tree expr;
4681 {
4682 register tree t = expr;
4683 tree t1 = NULL_TREE;
4684 tree tem;
4685 tree type = TREE_TYPE (expr);
4686 register tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4687 register enum tree_code code = TREE_CODE (t);
4688 register int kind;
4689 int invert;
4690 /* WINS will be nonzero when the switch is done
4691 if all operands are constant. */
4692 int wins = 1;
4693
4694 /* Don't try to process an RTL_EXPR since its operands aren't trees.
4695 Likewise for a SAVE_EXPR that's already been evaluated. */
4696 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
4697 return t;
4698
4699 /* Return right away if already constant. */
4700 if (TREE_CONSTANT (t))
4701 {
4702 if (code == CONST_DECL)
4703 return DECL_INITIAL (t);
4704 return t;
4705 }
4706
4707 #ifdef MAX_INTEGER_COMPUTATION_MODE
4708 check_max_integer_computation_mode (expr);
4709 #endif
4710
4711 kind = TREE_CODE_CLASS (code);
4712 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
4713 {
4714 tree subop;
4715
4716 /* Special case for conversion ops that can have fixed point args. */
4717 arg0 = TREE_OPERAND (t, 0);
4718
4719 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
4720 if (arg0 != 0)
4721 STRIP_SIGN_NOPS (arg0);
4722
4723 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
4724 subop = TREE_REALPART (arg0);
4725 else
4726 subop = arg0;
4727
4728 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
4729 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4730 && TREE_CODE (subop) != REAL_CST
4731 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4732 )
4733 /* Note that TREE_CONSTANT isn't enough:
4734 static var addresses are constant but we can't
4735 do arithmetic on them. */
4736 wins = 0;
4737 }
4738 else if (kind == 'e' || kind == '<'
4739 || kind == '1' || kind == '2' || kind == 'r')
4740 {
4741 register int len = tree_code_length[(int) code];
4742 register int i;
4743 for (i = 0; i < len; i++)
4744 {
4745 tree op = TREE_OPERAND (t, i);
4746 tree subop;
4747
4748 if (op == 0)
4749 continue; /* Valid for CALL_EXPR, at least. */
4750
4751 if (kind == '<' || code == RSHIFT_EXPR)
4752 {
4753 /* Signedness matters here. Perhaps we can refine this
4754 later. */
4755 STRIP_SIGN_NOPS (op);
4756 }
4757 else
4758 /* Strip any conversions that don't change the mode. */
4759 STRIP_NOPS (op);
4760
4761 if (TREE_CODE (op) == COMPLEX_CST)
4762 subop = TREE_REALPART (op);
4763 else
4764 subop = op;
4765
4766 if (TREE_CODE (subop) != INTEGER_CST
4767 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4768 && TREE_CODE (subop) != REAL_CST
4769 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4770 )
4771 /* Note that TREE_CONSTANT isn't enough:
4772 static var addresses are constant but we can't
4773 do arithmetic on them. */
4774 wins = 0;
4775
4776 if (i == 0)
4777 arg0 = op;
4778 else if (i == 1)
4779 arg1 = op;
4780 }
4781 }
4782
4783 /* If this is a commutative operation, and ARG0 is a constant, move it
4784 to ARG1 to reduce the number of tests below. */
4785 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
4786 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
4787 || code == BIT_AND_EXPR)
4788 && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
4789 {
4790 tem = arg0; arg0 = arg1; arg1 = tem;
4791
4792 tem = TREE_OPERAND (t, 0); TREE_OPERAND (t, 0) = TREE_OPERAND (t, 1);
4793 TREE_OPERAND (t, 1) = tem;
4794 }
4795
4796 /* Now WINS is set as described above,
4797 ARG0 is the first operand of EXPR,
4798 and ARG1 is the second operand (if it has more than one operand).
4799
4800 First check for cases where an arithmetic operation is applied to a
4801 compound, conditional, or comparison operation. Push the arithmetic
4802 operation inside the compound or conditional to see if any folding
4803 can then be done. Convert comparison to conditional for this purpose.
4804 This also optimizes non-constant cases that used to be done in
4805 expand_expr.
4806
4807 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
4808 where one of the operands is a truth value and the other is a truth
4809 value or a BIT_AND_EXPR with the constant 1. In that case, the
4810 code below would make the expression more complex. Change it to a
4811 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
4812 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
4813
4814 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
4815 || code == EQ_EXPR || code == NE_EXPR)
4816 && ((truth_value_p (TREE_CODE (arg0))
4817 && (truth_value_p (TREE_CODE (arg1))
4818 || (TREE_CODE (arg1) == BIT_AND_EXPR
4819 && integer_onep (TREE_OPERAND (arg1, 1)))))
4820 || (truth_value_p (TREE_CODE (arg1))
4821 && (truth_value_p (TREE_CODE (arg0))
4822 || (TREE_CODE (arg0) == BIT_AND_EXPR
4823 && integer_onep (TREE_OPERAND (arg0, 1)))))))
4824 {
4825 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
4826 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
4827 : TRUTH_XOR_EXPR,
4828 type, arg0, arg1));
4829
4830 if (code == EQ_EXPR)
4831 t = invert_truthvalue (t);
4832
4833 return t;
4834 }
4835
4836 if (TREE_CODE_CLASS (code) == '1')
4837 {
4838 if (TREE_CODE (arg0) == COMPOUND_EXPR)
4839 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4840 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
4841 else if (TREE_CODE (arg0) == COND_EXPR)
4842 {
4843 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
4844 fold (build1 (code, type, TREE_OPERAND (arg0, 1))),
4845 fold (build1 (code, type, TREE_OPERAND (arg0, 2)))));
4846
4847 /* If this was a conversion, and all we did was to move it
4848 inside the COND_EXPR, bring it back out. But leave it if
4849 it is a conversion from integer to integer and the
4850 result precision is no wider than a word since such a
4851 conversion is cheap and may be optimized away by combine,
4852 while it couldn't if it were outside the COND_EXPR. Then return
4853 so we don't get into an infinite recursion loop taking the
4854 conversion out and then back in. */
4855
4856 if ((code == NOP_EXPR || code == CONVERT_EXPR
4857 || code == NON_LVALUE_EXPR)
4858 && TREE_CODE (t) == COND_EXPR
4859 && TREE_CODE (TREE_OPERAND (t, 1)) == code
4860 && TREE_CODE (TREE_OPERAND (t, 2)) == code
4861 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
4862 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
4863 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
4864 && (INTEGRAL_TYPE_P
4865 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))))
4866 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
4867 t = build1 (code, type,
4868 build (COND_EXPR,
4869 TREE_TYPE (TREE_OPERAND
4870 (TREE_OPERAND (t, 1), 0)),
4871 TREE_OPERAND (t, 0),
4872 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
4873 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
4874 return t;
4875 }
4876 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
4877 return fold (build (COND_EXPR, type, arg0,
4878 fold (build1 (code, type, integer_one_node)),
4879 fold (build1 (code, type, integer_zero_node))));
4880 }
4881 else if (TREE_CODE_CLASS (code) == '2'
4882 || TREE_CODE_CLASS (code) == '<')
4883 {
4884 if (TREE_CODE (arg1) == COMPOUND_EXPR)
4885 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
4886 fold (build (code, type,
4887 arg0, TREE_OPERAND (arg1, 1))));
4888 else if ((TREE_CODE (arg1) == COND_EXPR
4889 || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
4890 && TREE_CODE_CLASS (code) != '<'))
4891 && (TREE_CODE (arg0) != COND_EXPR
4892 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
4893 && (! TREE_SIDE_EFFECTS (arg0)
4894 || (global_bindings_p () == 0
4895 && ! contains_placeholder_p (arg0))))
4896 {
4897 tree test, true_value, false_value;
4898 tree lhs = 0, rhs = 0;
4899
4900 if (TREE_CODE (arg1) == COND_EXPR)
4901 {
4902 test = TREE_OPERAND (arg1, 0);
4903 true_value = TREE_OPERAND (arg1, 1);
4904 false_value = TREE_OPERAND (arg1, 2);
4905 }
4906 else
4907 {
4908 tree testtype = TREE_TYPE (arg1);
4909 test = arg1;
4910 true_value = convert (testtype, integer_one_node);
4911 false_value = convert (testtype, integer_zero_node);
4912 }
4913
4914 /* If ARG0 is complex we want to make sure we only evaluate
4915 it once. Though this is only required if it is volatile, it
4916 might be more efficient even if it is not. However, if we
4917 succeed in folding one part to a constant, we do not need
4918 to make this SAVE_EXPR. Since we do this optimization
4919 primarily to see if we do end up with constant and this
4920 SAVE_EXPR interferes with later optimizations, suppressing
4921 it when we can is important.
4922
4923 If we are not in a function, we can't make a SAVE_EXPR, so don't
4924 try to do so. Don't try to see if the result is a constant
4925 if an arm is a COND_EXPR since we get exponential behavior
4926 in that case. */
4927
4928 if (TREE_CODE (arg0) != SAVE_EXPR && ! TREE_CONSTANT (arg0)
4929 && global_bindings_p () == 0
4930 && ((TREE_CODE (arg0) != VAR_DECL
4931 && TREE_CODE (arg0) != PARM_DECL)
4932 || TREE_SIDE_EFFECTS (arg0)))
4933 {
4934 if (TREE_CODE (true_value) != COND_EXPR)
4935 lhs = fold (build (code, type, arg0, true_value));
4936
4937 if (TREE_CODE (false_value) != COND_EXPR)
4938 rhs = fold (build (code, type, arg0, false_value));
4939
4940 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4941 && (rhs == 0 || !TREE_CONSTANT (rhs)))
4942 arg0 = save_expr (arg0), lhs = rhs = 0;
4943 }
4944
4945 if (lhs == 0)
4946 lhs = fold (build (code, type, arg0, true_value));
4947 if (rhs == 0)
4948 rhs = fold (build (code, type, arg0, false_value));
4949
4950 test = fold (build (COND_EXPR, type, test, lhs, rhs));
4951
4952 if (TREE_CODE (arg0) == SAVE_EXPR)
4953 return build (COMPOUND_EXPR, type,
4954 convert (void_type_node, arg0),
4955 strip_compound_expr (test, arg0));
4956 else
4957 return convert (type, test);
4958 }
4959
4960 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
4961 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4962 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
4963 else if ((TREE_CODE (arg0) == COND_EXPR
4964 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
4965 && TREE_CODE_CLASS (code) != '<'))
4966 && (TREE_CODE (arg1) != COND_EXPR
4967 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
4968 && (! TREE_SIDE_EFFECTS (arg1)
4969 || (global_bindings_p () == 0
4970 && ! contains_placeholder_p (arg1))))
4971 {
4972 tree test, true_value, false_value;
4973 tree lhs = 0, rhs = 0;
4974
4975 if (TREE_CODE (arg0) == COND_EXPR)
4976 {
4977 test = TREE_OPERAND (arg0, 0);
4978 true_value = TREE_OPERAND (arg0, 1);
4979 false_value = TREE_OPERAND (arg0, 2);
4980 }
4981 else
4982 {
4983 tree testtype = TREE_TYPE (arg0);
4984 test = arg0;
4985 true_value = convert (testtype, integer_one_node);
4986 false_value = convert (testtype, integer_zero_node);
4987 }
4988
4989 if (TREE_CODE (arg1) != SAVE_EXPR && ! TREE_CONSTANT (arg0)
4990 && global_bindings_p () == 0
4991 && ((TREE_CODE (arg1) != VAR_DECL
4992 && TREE_CODE (arg1) != PARM_DECL)
4993 || TREE_SIDE_EFFECTS (arg1)))
4994 {
4995 if (TREE_CODE (true_value) != COND_EXPR)
4996 lhs = fold (build (code, type, true_value, arg1));
4997
4998 if (TREE_CODE (false_value) != COND_EXPR)
4999 rhs = fold (build (code, type, false_value, arg1));
5000
5001 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
5002 && (rhs == 0 || !TREE_CONSTANT (rhs)))
5003 arg1 = save_expr (arg1), lhs = rhs = 0;
5004 }
5005
5006 if (lhs == 0)
5007 lhs = fold (build (code, type, true_value, arg1));
5008
5009 if (rhs == 0)
5010 rhs = fold (build (code, type, false_value, arg1));
5011
5012 test = fold (build (COND_EXPR, type, test, lhs, rhs));
5013 if (TREE_CODE (arg1) == SAVE_EXPR)
5014 return build (COMPOUND_EXPR, type,
5015 convert (void_type_node, arg1),
5016 strip_compound_expr (test, arg1));
5017 else
5018 return convert (type, test);
5019 }
5020 }
5021 else if (TREE_CODE_CLASS (code) == '<'
5022 && TREE_CODE (arg0) == COMPOUND_EXPR)
5023 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5024 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5025 else if (TREE_CODE_CLASS (code) == '<'
5026 && TREE_CODE (arg1) == COMPOUND_EXPR)
5027 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5028 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
5029
5030 switch (code)
5031 {
5032 case INTEGER_CST:
5033 case REAL_CST:
5034 case STRING_CST:
5035 case COMPLEX_CST:
5036 case CONSTRUCTOR:
5037 return t;
5038
5039 case CONST_DECL:
5040 return fold (DECL_INITIAL (t));
5041
5042 case NOP_EXPR:
5043 case FLOAT_EXPR:
5044 case CONVERT_EXPR:
5045 case FIX_TRUNC_EXPR:
5046 /* Other kinds of FIX are not handled properly by fold_convert. */
5047
5048 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
5049 return TREE_OPERAND (t, 0);
5050
5051 /* Handle cases of two conversions in a row. */
5052 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
5053 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
5054 {
5055 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5056 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
5057 tree final_type = TREE_TYPE (t);
5058 int inside_int = INTEGRAL_TYPE_P (inside_type);
5059 int inside_ptr = POINTER_TYPE_P (inside_type);
5060 int inside_float = FLOAT_TYPE_P (inside_type);
5061 unsigned int inside_prec = TYPE_PRECISION (inside_type);
5062 int inside_unsignedp = TREE_UNSIGNED (inside_type);
5063 int inter_int = INTEGRAL_TYPE_P (inter_type);
5064 int inter_ptr = POINTER_TYPE_P (inter_type);
5065 int inter_float = FLOAT_TYPE_P (inter_type);
5066 unsigned int inter_prec = TYPE_PRECISION (inter_type);
5067 int inter_unsignedp = TREE_UNSIGNED (inter_type);
5068 int final_int = INTEGRAL_TYPE_P (final_type);
5069 int final_ptr = POINTER_TYPE_P (final_type);
5070 int final_float = FLOAT_TYPE_P (final_type);
5071 unsigned int final_prec = TYPE_PRECISION (final_type);
5072 int final_unsignedp = TREE_UNSIGNED (final_type);
5073
5074 /* In addition to the cases of two conversions in a row
5075 handled below, if we are converting something to its own
5076 type via an object of identical or wider precision, neither
5077 conversion is needed. */
5078 if (inside_type == final_type
5079 && ((inter_int && final_int) || (inter_float && final_float))
5080 && inter_prec >= final_prec)
5081 return TREE_OPERAND (TREE_OPERAND (t, 0), 0);
5082
5083 /* Likewise, if the intermediate and final types are either both
5084 float or both integer, we don't need the middle conversion if
5085 it is wider than the initial type and doesn't change the signedness
5086 (for integers). Avoid this if the final type is a pointer
5087 since then we sometimes need the inner conversion. Likewise if
5088 the outer has a precision not equal to the size of its mode. */
5089 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
5090 || (inter_float && inside_float))
5091 && inter_prec >= inside_prec
5092 && (inter_float || inter_unsignedp == inside_unsignedp)
5093 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5094 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5095 && ! final_ptr)
5096 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5097
5098 /* If we have a sign-extension of a zero-extended value, we can
5099 replace that by a single zero-extension. */
5100 if (inside_int && inter_int && final_int
5101 && inside_prec < inter_prec && inter_prec < final_prec
5102 && inside_unsignedp && !inter_unsignedp)
5103 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5104
5105 /* Two conversions in a row are not needed unless:
5106 - some conversion is floating-point (overstrict for now), or
5107 - the intermediate type is narrower than both initial and
5108 final, or
5109 - the intermediate type and innermost type differ in signedness,
5110 and the outermost type is wider than the intermediate, or
5111 - the initial type is a pointer type and the precisions of the
5112 intermediate and final types differ, or
5113 - the final type is a pointer type and the precisions of the
5114 initial and intermediate types differ. */
5115 if (! inside_float && ! inter_float && ! final_float
5116 && (inter_prec > inside_prec || inter_prec > final_prec)
5117 && ! (inside_int && inter_int
5118 && inter_unsignedp != inside_unsignedp
5119 && inter_prec < final_prec)
5120 && ((inter_unsignedp && inter_prec > inside_prec)
5121 == (final_unsignedp && final_prec > inter_prec))
5122 && ! (inside_ptr && inter_prec != final_prec)
5123 && ! (final_ptr && inside_prec != inter_prec)
5124 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5125 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5126 && ! final_ptr)
5127 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5128 }
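/* Example (a sketch assuming 16/32/64-bit precisions): with X of type
   `unsigned short', `(long long) (int) X' is a sign-extension of a
   zero-extended value, so the tests above replace it with the single
   zero-extension `(long long) X'.  */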
5129
5130 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
5131 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
5132 /* Detect assigning a bitfield. */
5133 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
5134 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
5135 {
5136 /* Don't leave an assignment inside a conversion
5137 unless assigning a bitfield. */
5138 tree prev = TREE_OPERAND (t, 0);
5139 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
5140 /* First do the assignment, then return converted constant. */
5141 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
5142 TREE_USED (t) = 1;
5143 return t;
5144 }
5145 if (!wins)
5146 {
5147 TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
5148 return t;
5149 }
5150 return fold_convert (t, arg0);
5151
5152 #if 0 /* This loses on &"foo"[0]. */
5153 case ARRAY_REF:
5154 {
5155 int i;
5156
5157 /* Fold an expression like: "foo"[2] */
5158 if (TREE_CODE (arg0) == STRING_CST
5159 && TREE_CODE (arg1) == INTEGER_CST
5160 && compare_tree_int (arg1, TREE_STRING_LENGTH (arg0)) < 0)
5161 {
5162 t = build_int_2 (TREE_STRING_POINTER (arg0)[TREE_INT_CST_LOW (arg1)], 0);
5163 TREE_TYPE (t) = TREE_TYPE (TREE_TYPE (arg0));
5164 force_fit_type (t, 0);
5165 }
5166 }
5167 return t;
5168 #endif /* 0 */
5169
5170 case COMPONENT_REF:
5171 if (TREE_CODE (arg0) == CONSTRUCTOR)
5172 {
5173 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
5174 if (m)
5175 t = TREE_VALUE (m);
5176 }
5177 return t;
5178
5179 case RANGE_EXPR:
5180 TREE_CONSTANT (t) = wins;
5181 return t;
5182
5183 case NEGATE_EXPR:
5184 if (wins)
5185 {
5186 if (TREE_CODE (arg0) == INTEGER_CST)
5187 {
5188 HOST_WIDE_INT low, high;
5189 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5190 TREE_INT_CST_HIGH (arg0),
5191 &low, &high);
5192 t = build_int_2 (low, high);
5193 TREE_TYPE (t) = type;
5194 TREE_OVERFLOW (t)
5195 = (TREE_OVERFLOW (arg0)
5196 | force_fit_type (t, overflow && !TREE_UNSIGNED (type)));
5197 TREE_CONSTANT_OVERFLOW (t)
5198 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5199 }
5200 else if (TREE_CODE (arg0) == REAL_CST)
5201 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5202 }
5203 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5204 return TREE_OPERAND (arg0, 0);
5205
5206 /* Convert - (a - b) to (b - a) for non-floating-point. */
5207 else if (TREE_CODE (arg0) == MINUS_EXPR
5208 && (! FLOAT_TYPE_P (type) || flag_fast_math))
5209 return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
5210 TREE_OPERAND (arg0, 0));
5211
5212 return t;
5213
5214 case ABS_EXPR:
5215 if (wins)
5216 {
5217 if (TREE_CODE (arg0) == INTEGER_CST)
5218 {
5219 if (! TREE_UNSIGNED (type)
5220 && TREE_INT_CST_HIGH (arg0) < 0)
5221 {
5222 HOST_WIDE_INT low, high;
5223 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5224 TREE_INT_CST_HIGH (arg0),
5225 &low, &high);
5226 t = build_int_2 (low, high);
5227 TREE_TYPE (t) = type;
5228 TREE_OVERFLOW (t)
5229 = (TREE_OVERFLOW (arg0)
5230 | force_fit_type (t, overflow));
5231 TREE_CONSTANT_OVERFLOW (t)
5232 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5233 }
5234 }
5235 else if (TREE_CODE (arg0) == REAL_CST)
5236 {
5237 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
5238 t = build_real (type,
5239 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5240 }
5241 }
5242 else if (TREE_CODE (arg0) == ABS_EXPR || TREE_CODE (arg0) == NEGATE_EXPR)
5243 return build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
5244 return t;
5245
5246 case CONJ_EXPR:
5247 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5248 return convert (type, arg0);
5249 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5250 return build (COMPLEX_EXPR, type,
5251 TREE_OPERAND (arg0, 0),
5252 negate_expr (TREE_OPERAND (arg0, 1)));
5253 else if (TREE_CODE (arg0) == COMPLEX_CST)
5254 return build_complex (type, TREE_OPERAND (arg0, 0),
5255 negate_expr (TREE_OPERAND (arg0, 1)));
5256 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5257 return fold (build (TREE_CODE (arg0), type,
5258 fold (build1 (CONJ_EXPR, type,
5259 TREE_OPERAND (arg0, 0))),
5260 fold (build1 (CONJ_EXPR,
5261 type, TREE_OPERAND (arg0, 1)))));
5262 else if (TREE_CODE (arg0) == CONJ_EXPR)
5263 return TREE_OPERAND (arg0, 0);
5264 return t;
5265
5266 case BIT_NOT_EXPR:
5267 if (wins)
5268 {
5269 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
5270 ~ TREE_INT_CST_HIGH (arg0));
5271 TREE_TYPE (t) = type;
5272 force_fit_type (t, 0);
5273 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
5274 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
5275 }
5276 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
5277 return TREE_OPERAND (arg0, 0);
5278 return t;
5279
5280 case PLUS_EXPR:
5281 /* A + (-B) -> A - B */
5282 if (TREE_CODE (arg1) == NEGATE_EXPR)
5283 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5284 /* (-A) + B -> B - A */
5285 if (TREE_CODE (arg0) == NEGATE_EXPR)
5286 return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
5287 else if (! FLOAT_TYPE_P (type))
5288 {
5289 if (integer_zerop (arg1))
5290 return non_lvalue (convert (type, arg0));
5291
5292 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
5293 with a constant, and the two constants have no bits in common,
5294 we should treat this as a BIT_IOR_EXPR since this may produce more
5295 simplifications. */
5296 if (TREE_CODE (arg0) == BIT_AND_EXPR
5297 && TREE_CODE (arg1) == BIT_AND_EXPR
5298 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5299 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5300 && integer_zerop (const_binop (BIT_AND_EXPR,
5301 TREE_OPERAND (arg0, 1),
5302 TREE_OPERAND (arg1, 1), 0)))
5303 {
5304 code = BIT_IOR_EXPR;
5305 goto bit_ior;
5306 }
5307
5308 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
5309 (plus (plus (mult) (mult)) (foo)) so that we can
5310 take advantage of the factoring cases below. */
5311 if ((TREE_CODE (arg0) == PLUS_EXPR
5312 && TREE_CODE (arg1) == MULT_EXPR)
5313 || (TREE_CODE (arg1) == PLUS_EXPR
5314 && TREE_CODE (arg0) == MULT_EXPR))
5315 {
5316 tree parg0, parg1, parg, marg;
5317
5318 if (TREE_CODE (arg0) == PLUS_EXPR)
5319 parg = arg0, marg = arg1;
5320 else
5321 parg = arg1, marg = arg0;
5322 parg0 = TREE_OPERAND (parg, 0);
5323 parg1 = TREE_OPERAND (parg, 1);
5324 STRIP_NOPS (parg0);
5325 STRIP_NOPS (parg1);
5326
5327 if (TREE_CODE (parg0) == MULT_EXPR
5328 && TREE_CODE (parg1) != MULT_EXPR)
5329 return fold (build (PLUS_EXPR, type,
5330 fold (build (PLUS_EXPR, type, parg0, marg)),
5331 parg1));
5332 if (TREE_CODE (parg0) != MULT_EXPR
5333 && TREE_CODE (parg1) == MULT_EXPR)
5334 return fold (build (PLUS_EXPR, type,
5335 fold (build (PLUS_EXPR, type, parg1, marg)),
5336 parg0));
5337 }
5338
5339 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
5340 {
5341 tree arg00, arg01, arg10, arg11;
5342 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
5343
5344 /* (A * C) + (B * C) -> (A+B) * C.
5345 We are most concerned about the case where C is a constant,
5346 but other combinations show up during loop reduction. Since
5347 it is not difficult, try all four possibilities. */
5348
5349 arg00 = TREE_OPERAND (arg0, 0);
5350 arg01 = TREE_OPERAND (arg0, 1);
5351 arg10 = TREE_OPERAND (arg1, 0);
5352 arg11 = TREE_OPERAND (arg1, 1);
5353 same = NULL_TREE;
5354
5355 if (operand_equal_p (arg01, arg11, 0))
5356 same = arg01, alt0 = arg00, alt1 = arg10;
5357 else if (operand_equal_p (arg00, arg10, 0))
5358 same = arg00, alt0 = arg01, alt1 = arg11;
5359 else if (operand_equal_p (arg00, arg11, 0))
5360 same = arg00, alt0 = arg01, alt1 = arg10;
5361 else if (operand_equal_p (arg01, arg10, 0))
5362 same = arg01, alt0 = arg00, alt1 = arg11;
5363
5364 /* No identical multiplicands; see if we can find a common
5365 power-of-two factor in non-power-of-two multiplies. This
5366 can help in multi-dimensional array access. */
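/* For example, (i * 12) + (j * 4) becomes ((i * 3) + j) * 4,
   exposing the common power-of-two factor 4.  */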
5367 else if (TREE_CODE (arg01) == INTEGER_CST
5368 && TREE_CODE (arg11) == INTEGER_CST
5369 && TREE_INT_CST_HIGH (arg01) == 0
5370 && TREE_INT_CST_HIGH (arg11) == 0)
5371 {
5372 HOST_WIDE_INT int01, int11, tmp;
5373 int01 = TREE_INT_CST_LOW (arg01);
5374 int11 = TREE_INT_CST_LOW (arg11);
5375
5376 /* Move min of absolute values to int11. */
5377 if ((int01 >= 0 ? int01 : -int01)
5378 < (int11 >= 0 ? int11 : -int11))
5379 {
5380 tmp = int01, int01 = int11, int11 = tmp;
5381 alt0 = arg00, arg00 = arg10, arg10 = alt0;
5382 alt0 = arg01, arg01 = arg11, arg11 = alt0;
5383 }
5384
5385 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
5386 {
5387 alt0 = fold (build (MULT_EXPR, type, arg00,
5388 build_int_2 (int01 / int11, 0)));
5389 alt1 = arg10;
5390 same = arg11;
5391 }
5392 }
5393
5394 if (same)
5395 return fold (build (MULT_EXPR, type,
5396 fold (build (PLUS_EXPR, type, alt0, alt1)),
5397 same));
5398 }
5399 }
5400 /* In IEEE floating point, x+0 may not equal x. */
5401 else if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5402 || flag_fast_math)
5403 && real_zerop (arg1))
5404 return non_lvalue (convert (type, arg0));
5405 /* x+(-0) equals x, even for IEEE. */
5406 else if (TREE_CODE (arg1) == REAL_CST
5407 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (arg1)))
5408 return non_lvalue (convert (type, arg0));
5409
5410 bit_rotate:
5411 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
5412 is a rotate of A by C1 bits. */
5413 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
5414 is a rotate of A by B bits. */
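/* For example, with a 32-bit unsigned A, both (A << 3) + (A >> 29)
   and (A << B) + (A >> (32 - B)) become left rotates of A.  */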
5415 {
5416 register enum tree_code code0, code1;
5417 code0 = TREE_CODE (arg0);
5418 code1 = TREE_CODE (arg1);
5419 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
5420 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
5421 && operand_equal_p (TREE_OPERAND (arg0, 0),
5422 TREE_OPERAND (arg1,0), 0)
5423 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5424 {
5425 register tree tree01, tree11;
5426 register enum tree_code code01, code11;
5427
5428 tree01 = TREE_OPERAND (arg0, 1);
5429 tree11 = TREE_OPERAND (arg1, 1);
5430 STRIP_NOPS (tree01);
5431 STRIP_NOPS (tree11);
5432 code01 = TREE_CODE (tree01);
5433 code11 = TREE_CODE (tree11);
5434 if (code01 == INTEGER_CST
5435 && code11 == INTEGER_CST
5436 && TREE_INT_CST_HIGH (tree01) == 0
5437 && TREE_INT_CST_HIGH (tree11) == 0
5438 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
5439 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
5440 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
5441 code0 == LSHIFT_EXPR ? tree01 : tree11);
5442 else if (code11 == MINUS_EXPR)
5443 {
5444 tree tree110, tree111;
5445 tree110 = TREE_OPERAND (tree11, 0);
5446 tree111 = TREE_OPERAND (tree11, 1);
5447 STRIP_NOPS (tree110);
5448 STRIP_NOPS (tree111);
5449 if (TREE_CODE (tree110) == INTEGER_CST
5450 && 0 == compare_tree_int (tree110,
5451 TYPE_PRECISION
5452 (TREE_TYPE (TREE_OPERAND
5453 (arg0, 0))))
5454 && operand_equal_p (tree01, tree111, 0))
5455 return build ((code0 == LSHIFT_EXPR
5456 ? LROTATE_EXPR
5457 : RROTATE_EXPR),
5458 type, TREE_OPERAND (arg0, 0), tree01);
5459 }
5460 else if (code01 == MINUS_EXPR)
5461 {
5462 tree tree010, tree011;
5463 tree010 = TREE_OPERAND (tree01, 0);
5464 tree011 = TREE_OPERAND (tree01, 1);
5465 STRIP_NOPS (tree010);
5466 STRIP_NOPS (tree011);
5467 if (TREE_CODE (tree010) == INTEGER_CST
5468 && 0 == compare_tree_int (tree010,
5469 TYPE_PRECISION
5470 (TREE_TYPE (TREE_OPERAND
5471 (arg0, 0))))
5472 && operand_equal_p (tree11, tree011, 0))
5473 return build ((code0 != LSHIFT_EXPR
5474 ? LROTATE_EXPR
5475 : RROTATE_EXPR),
5476 type, TREE_OPERAND (arg0, 0), tree11);
5477 }
5478 }
5479 }
5480
5481
5482 associate:
5483 /* In most languages, can't associate operations on floats through
5484 parentheses. Rather than remember where the parentheses were, we
5485 don't associate floats at all. It shouldn't matter much. However,
5486 associating multiplications is only very slightly inaccurate, so do
5487 that if -ffast-math is specified. */
5488
5489 if (! wins
5490 && (! FLOAT_TYPE_P (type)
5491 || (flag_fast_math && code == MULT_EXPR)))
5492 {
5493 tree var0, con0, lit0, var1, con1, lit1;
5494
5495 /* Split both trees into variables, constants, and literals. Then
5496 associate each group together, the constants with literals,
5497 then the result with variables. This increases the chances of
5498 literals being recombined later and of generating relocatable
5499 expressions for the sum of a constant and literal. */
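/* For example, (x + 1) + (y + 2) splits into variables x, y and
   literals 1, 2, and is rebuilt below as (x + y) + 3.  */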
5500 var0 = split_tree (arg0, code, &con0, &lit0, 0);
5501 var1 = split_tree (arg1, code, &con1, &lit1, code == MINUS_EXPR);
5502
5503 /* Only do something if we found more than two objects. Otherwise,
5504 nothing has changed and we risk infinite recursion. */
5505 if (2 < ((var0 != 0) + (var1 != 0) + (con0 != 0) + (con1 != 0)
5506 + (lit0 != 0) + (lit1 != 0)))
5507 {
5508 var0 = associate_trees (var0, var1, code, type);
5509 con0 = associate_trees (con0, con1, code, type);
5510 lit0 = associate_trees (lit0, lit1, code, type);
5511 con0 = associate_trees (con0, lit0, code, type);
5512 return convert (type, associate_trees (var0, con0, code, type));
5513 }
5514 }
5515
5516 binary:
5517 #if defined (REAL_IS_NOT_DOUBLE) && ! defined (REAL_ARITHMETIC)
5518 if (TREE_CODE (arg1) == REAL_CST)
5519 return t;
5520 #endif /* REAL_IS_NOT_DOUBLE, and no REAL_ARITHMETIC */
5521 if (wins)
5522 t1 = const_binop (code, arg0, arg1, 0);
5523 if (t1 != NULL_TREE)
5524 {
5525 /* The return value should always have
5526 the same type as the original expression. */
5527 if (TREE_TYPE (t1) != TREE_TYPE (t))
5528 t1 = convert (TREE_TYPE (t), t1);
5529
5530 return t1;
5531 }
5532 return t;
5533
5534 case MINUS_EXPR:
5535 /* A - (-B) -> A + B */
5536 if (TREE_CODE (arg1) == NEGATE_EXPR)
5537 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5538 /* (-A) - CST -> (-CST) - A for floating point (what about ints?) */
5539 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == REAL_CST)
5540 return
5541 fold (build (MINUS_EXPR, type,
5542 build_real (TREE_TYPE (arg1),
5543 REAL_VALUE_NEGATE (TREE_REAL_CST (arg1))),
5544 TREE_OPERAND (arg0, 0)));
5545
5546 if (! FLOAT_TYPE_P (type))
5547 {
5548 if (! wins && integer_zerop (arg0))
5549 return convert (type, negate_expr (arg1));
5550 if (integer_zerop (arg1))
5551 return non_lvalue (convert (type, arg0));
5552
5553 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
5554 about the case where C is a constant, just try one of the
5555 four possibilities. */
5556
5557 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
5558 && operand_equal_p (TREE_OPERAND (arg0, 1),
5559 TREE_OPERAND (arg1, 1), 0))
5560 return fold (build (MULT_EXPR, type,
5561 fold (build (MINUS_EXPR, type,
5562 TREE_OPERAND (arg0, 0),
5563 TREE_OPERAND (arg1, 0))),
5564 TREE_OPERAND (arg0, 1)));
5565 }
5566
5567 else if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5568 || flag_fast_math)
5569 {
5570 /* Except with IEEE floating point, 0-x equals -x. */
5571 if (! wins && real_zerop (arg0))
5572 return convert (type, negate_expr (arg1));
5573 /* Except with IEEE floating point, x-0 equals x. */
5574 if (real_zerop (arg1))
5575 return non_lvalue (convert (type, arg0));
5576 }
5577
5578 /* Fold &x - &x. This can happen from &x.foo - &x.
5579 This is unsafe for certain floats even in non-IEEE formats.
5580 In IEEE, it is unsafe because it does wrong for NaNs.
5581 Also note that operand_equal_p is always false if an operand
5582 is volatile. */
5583
5584 if ((! FLOAT_TYPE_P (type) || flag_fast_math)
5585 && operand_equal_p (arg0, arg1, 0))
5586 return convert (type, integer_zero_node);
5587
5588 goto associate;
5589
5590 case MULT_EXPR:
5591 /* (-A) * (-B) -> A * B */
5592 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
5593 return fold (build (MULT_EXPR, type, TREE_OPERAND (arg0, 0),
5594 TREE_OPERAND (arg1, 0)));
5595
5596 if (! FLOAT_TYPE_P (type))
5597 {
5598 if (integer_zerop (arg1))
5599 return omit_one_operand (type, arg1, arg0);
5600 if (integer_onep (arg1))
5601 return non_lvalue (convert (type, arg0));
5602
5603 /* (a * (1 << b)) is (a << b) */
5604 if (TREE_CODE (arg1) == LSHIFT_EXPR
5605 && integer_onep (TREE_OPERAND (arg1, 0)))
5606 return fold (build (LSHIFT_EXPR, type, arg0,
5607 TREE_OPERAND (arg1, 1)));
5608 if (TREE_CODE (arg0) == LSHIFT_EXPR
5609 && integer_onep (TREE_OPERAND (arg0, 0)))
5610 return fold (build (LSHIFT_EXPR, type, arg1,
5611 TREE_OPERAND (arg0, 1)));
5612
5613 if (TREE_CODE (arg1) == INTEGER_CST
5614 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
5615 code, NULL_TREE)))
5616 return convert (type, tem);
5617
5618 }
5619 else
5620 {
5621 /* x*0 is 0, except for IEEE floating point. */
5622 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5623 || flag_fast_math)
5624 && real_zerop (arg1))
5625 return omit_one_operand (type, arg1, arg0);
5626 /* In IEEE floating point, x*1 is not equivalent to x for snans.
5627 However, ANSI says we can drop signals,
5628 so we can do this anyway. */
5629 if (real_onep (arg1))
5630 return non_lvalue (convert (type, arg0));
5631 /* x*2 is x+x */
5632 if (! wins && real_twop (arg1) && global_bindings_p () == 0
5633 && ! contains_placeholder_p (arg0))
5634 {
5635 tree arg = save_expr (arg0);
5636 return build (PLUS_EXPR, type, arg, arg);
5637 }
5638 }
5639 goto associate;
5640
5641 case BIT_IOR_EXPR:
5642 bit_ior:
5643 if (integer_all_onesp (arg1))
5644 return omit_one_operand (type, arg1, arg0);
5645 if (integer_zerop (arg1))
5646 return non_lvalue (convert (type, arg0));
5647 t1 = distribute_bit_expr (code, type, arg0, arg1);
5648 if (t1 != NULL_TREE)
5649 return t1;
5650
5651 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
5652
5653 This results in more efficient code for machines without a NAND
5654 instruction. Combine will canonicalize to the first form
5655 which will allow use of NAND instructions provided by the
5656 backend if they exist. */
5657 if (TREE_CODE (arg0) == BIT_NOT_EXPR
5658 && TREE_CODE (arg1) == BIT_NOT_EXPR)
5659 {
5660 return fold (build1 (BIT_NOT_EXPR, type,
5661 build (BIT_AND_EXPR, type,
5662 TREE_OPERAND (arg0, 0),
5663 TREE_OPERAND (arg1, 0))));
5664 }
5665
5666 /* See if this can be simplified into a rotate first. If that
5667 is unsuccessful continue in the association code. */
5668 goto bit_rotate;
5669
5670 case BIT_XOR_EXPR:
5671 if (integer_zerop (arg1))
5672 return non_lvalue (convert (type, arg0));
5673 if (integer_all_onesp (arg1))
5674 return fold (build1 (BIT_NOT_EXPR, type, arg0));
5675
5676 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
5677 with a constant, and the two constants have no bits in common,
5678 we should treat this as a BIT_IOR_EXPR since this may produce more
5679 simplifications. */
5680 if (TREE_CODE (arg0) == BIT_AND_EXPR
5681 && TREE_CODE (arg1) == BIT_AND_EXPR
5682 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5683 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5684 && integer_zerop (const_binop (BIT_AND_EXPR,
5685 TREE_OPERAND (arg0, 1),
5686 TREE_OPERAND (arg1, 1), 0)))
5687 {
5688 code = BIT_IOR_EXPR;
5689 goto bit_ior;
5690 }
5691
5692 /* See if this can be simplified into a rotate first. If that
5693 is unsuccessful continue in the association code. */
5694 goto bit_rotate;
5695
5696 case BIT_AND_EXPR:
5697 bit_and:
5698 if (integer_all_onesp (arg1))
5699 return non_lvalue (convert (type, arg0));
5700 if (integer_zerop (arg1))
5701 return omit_one_operand (type, arg1, arg0);
5702 t1 = distribute_bit_expr (code, type, arg0, arg1);
5703 if (t1 != NULL_TREE)
5704 return t1;
5705 /* Simplify ((int)c & 0xff) into (int)c, if c is unsigned char. */
5706 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == NOP_EXPR
5707 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0))))
5708 {
5709 unsigned int prec
5710 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)));
5711
5712 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
5713 && (~TREE_INT_CST_LOW (arg0)
5714 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
5715 return build1 (NOP_EXPR, type, TREE_OPERAND (arg1, 0));
5716 }
5717 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
5718 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5719 {
5720 unsigned int prec
5721 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
5722
5723 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
5724 && (~TREE_INT_CST_LOW (arg1)
5725 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
5726 return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
5727 }
5728
5729 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
5730
5731 This results in more efficient code for machines without a NOR
5732 instruction. Combine will canonicalize to the first form
5733 which will allow use of NOR instructions provided by the
5734 backend if they exist. */
5735 if (TREE_CODE (arg0) == BIT_NOT_EXPR
5736 && TREE_CODE (arg1) == BIT_NOT_EXPR)
5737 {
5738 return fold (build1 (BIT_NOT_EXPR, type,
5739 build (BIT_IOR_EXPR, type,
5740 TREE_OPERAND (arg0, 0),
5741 TREE_OPERAND (arg1, 0))));
5742 }
5743
5744 goto associate;
5745
5746 case BIT_ANDTC_EXPR:
5747 if (integer_all_onesp (arg0))
5748 return non_lvalue (convert (type, arg1));
5749 if (integer_zerop (arg0))
5750 return omit_one_operand (type, arg0, arg1);
5751 if (TREE_CODE (arg1) == INTEGER_CST)
5752 {
5753 arg1 = fold (build1 (BIT_NOT_EXPR, type, arg1));
5754 code = BIT_AND_EXPR;
5755 goto bit_and;
5756 }
5757 goto binary;
5758
5759 case RDIV_EXPR:
5760 /* In most cases, do nothing with a divide by zero. */
5761 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
5762 #ifndef REAL_INFINITY
5763 if (TREE_CODE (arg1) == REAL_CST && real_zerop (arg1))
5764 return t;
5765 #endif
5766 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
5767
5768 /* (-A) / (-B) -> A / B */
5769 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
5770 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
5771 TREE_OPERAND (arg1, 0)));
5772
5773 /* In IEEE floating point, x/1 is not equivalent to x for snans.
5774 However, ANSI says we can drop signals, so we can do this anyway. */
5775 if (real_onep (arg1))
5776 return non_lvalue (convert (type, arg0));
5777
5778 /* If ARG1 is a constant, we can convert this to a multiply by the
5779 reciprocal. This does not have the same rounding properties,
5780 so only do this if -ffast-math. We can actually always safely
5781 do it if ARG1 is a power of two, but it's hard to tell if it is
5782 or not in a portable manner. */
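/* For example, x / 8.0 becomes x * 0.125 when optimizing, even
   without -ffast-math, because the reciprocal of 8.0 is exact;
   x / 3.0 becomes a multiply only under -ffast-math, since 1/3.0
   must be rounded.  */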
5783 if (TREE_CODE (arg1) == REAL_CST)
5784 {
5785 if (flag_fast_math
5786 && 0 != (tem = const_binop (code, build_real (type, dconst1),
5787 arg1, 0)))
5788 return fold (build (MULT_EXPR, type, arg0, tem));
5789 /* Find the reciprocal if optimizing and the result is exact. */
5790 else if (optimize)
5791 {
5792 REAL_VALUE_TYPE r;
5793 r = TREE_REAL_CST (arg1);
5794 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
5795 {
5796 tem = build_real (type, r);
5797 return fold (build (MULT_EXPR, type, arg0, tem));
5798 }
5799 }
5800 }
5801 goto binary;
5802
5803 case TRUNC_DIV_EXPR:
5804 case ROUND_DIV_EXPR:
5805 case FLOOR_DIV_EXPR:
5806 case CEIL_DIV_EXPR:
5807 case EXACT_DIV_EXPR:
5808 if (integer_onep (arg1))
5809 return non_lvalue (convert (type, arg0));
5810 if (integer_zerop (arg1))
5811 return t;
5812
5813 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
5814 operation, EXACT_DIV_EXPR.
5815
5816 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
5817 At one time others generated faster code, but it's not clear whether they
5818 do after the last round of changes to the DIV code in expmed.c. */
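/* For example, a CEIL_DIV_EXPR of n * 4 by 4, as can arise in size
   expressions, is known to divide evenly and so is rewritten as an
   EXACT_DIV_EXPR.  */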
5819 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
5820 && multiple_of_p (type, arg0, arg1))
5821 return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));
5822
5823 if (TREE_CODE (arg1) == INTEGER_CST
5824 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
5825 code, NULL_TREE)))
5826 return convert (type, tem);
5827
5828 goto binary;
5829
5830 case CEIL_MOD_EXPR:
5831 case FLOOR_MOD_EXPR:
5832 case ROUND_MOD_EXPR:
5833 case TRUNC_MOD_EXPR:
5834 if (integer_onep (arg1))
5835 return omit_one_operand (type, integer_zero_node, arg0);
5836 if (integer_zerop (arg1))
5837 return t;
5838
5839 if (TREE_CODE (arg1) == INTEGER_CST
5840 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
5841 code, NULL_TREE)))
5842 return convert (type, tem);
5843
5844 goto binary;
5845
5846 case LSHIFT_EXPR:
5847 case RSHIFT_EXPR:
5848 case LROTATE_EXPR:
5849 case RROTATE_EXPR:
5850 if (integer_zerop (arg1))
5851 return non_lvalue (convert (type, arg0));
5852 /* Since a negative shift count is not well-defined,
5853 don't try to compute it in the compiler. */
5854 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
5855 return t;
5856 /* Rewrite an LROTATE_EXPR by a constant into an
5857 RROTATE_EXPR by a new constant. */
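/* Rotating left by C bits is rotating right by (width - C) bits;
   e.g. a 32-bit left rotate by 5 becomes a right rotate by 27.  */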
5858 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
5859 {
5860 TREE_SET_CODE (t, RROTATE_EXPR);
5861 code = RROTATE_EXPR;
5862 TREE_OPERAND (t, 1) = arg1
5863 = const_binop
5864 (MINUS_EXPR,
5865 convert (TREE_TYPE (arg1),
5866 build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0)),
5867 arg1, 0);
5868 if (tree_int_cst_sgn (arg1) < 0)
5869 return t;
5870 }
5871
5872 /* If we have a rotate of a bit operation with the rotate count and
5873 the second operand of the bit operation both constant,
5874 permute the two operations. */
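/* For example, (x & C2) rrotate C1 becomes
   (x rrotate C1) & (C2 rrotate C1), and the rotated constant then
   folds to a single constant.  */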
5875 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
5876 && (TREE_CODE (arg0) == BIT_AND_EXPR
5877 || TREE_CODE (arg0) == BIT_ANDTC_EXPR
5878 || TREE_CODE (arg0) == BIT_IOR_EXPR
5879 || TREE_CODE (arg0) == BIT_XOR_EXPR)
5880 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
5881 return fold (build (TREE_CODE (arg0), type,
5882 fold (build (code, type,
5883 TREE_OPERAND (arg0, 0), arg1)),
5884 fold (build (code, type,
5885 TREE_OPERAND (arg0, 1), arg1))));
5886
5887 /* Two consecutive rotates adding up to the width of the mode can
5888 be ignored. */
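/* For example, ((x rrotate 8) rrotate 24) is simply x when x is
   32 bits wide.  */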
5889 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
5890 && TREE_CODE (arg0) == RROTATE_EXPR
5891 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5892 && TREE_INT_CST_HIGH (arg1) == 0
5893 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
5894 && ((TREE_INT_CST_LOW (arg1)
5895 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
5896 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
5897 return TREE_OPERAND (arg0, 0);
5898
5899 goto binary;
5900
5901 case MIN_EXPR:
5902 if (operand_equal_p (arg0, arg1, 0))
5903 return omit_one_operand (type, arg0, arg1);
5904 if (INTEGRAL_TYPE_P (type)
5905 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
5906 return omit_one_operand (type, arg1, arg0);
5907 goto associate;
5908
5909 case MAX_EXPR:
5910 if (operand_equal_p (arg0, arg1, 0))
5911 return omit_one_operand (type, arg0, arg1);
5912 if (INTEGRAL_TYPE_P (type)
5913 && TYPE_MAX_VALUE (type)
5914 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
5915 return omit_one_operand (type, arg1, arg0);
5916 goto associate;
5917
5918 case TRUTH_NOT_EXPR:
5919 /* Note that the operand of this must be an int
5920 and its values must be 0 or 1.
5921 ("true" is a fixed value perhaps depending on the language,
5922 but we don't handle values other than 1 correctly yet.) */
5923 tem = invert_truthvalue (arg0);
5924 /* Avoid infinite recursion. */
5925 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
5926 return t;
5927 return convert (type, tem);
5928
5929 case TRUTH_ANDIF_EXPR:
5930 /* Note that the operands of this must be ints
5931 and their values must be 0 or 1.
5932 ("true" is a fixed value perhaps depending on the language.) */
5933 /* If first arg is constant zero, return it. */
5934 if (integer_zerop (arg0))
5935 return convert (type, arg0);
5936 case TRUTH_AND_EXPR:
5937 /* If either arg is constant true, drop it. */
5938 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
5939 return non_lvalue (convert (type, arg1));
5940 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
5941 return non_lvalue (convert (type, arg0));
5942 /* If second arg is constant zero, result is zero, but first arg
5943 must be evaluated. */
5944 if (integer_zerop (arg1))
5945 return omit_one_operand (type, arg1, arg0);
5946 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
5947 case will be handled here. */
5948 if (integer_zerop (arg0))
5949 return omit_one_operand (type, arg0, arg1);
5950
5951 truth_andor:
5952 /* We only do these simplifications if we are optimizing. */
5953 if (!optimize)
5954 return t;
5955
5956 /* Check for things like (A || B) && (A || C). We can convert this
5957 to A || (B && C). Note that either operator can be any of the four
5958 truth and/or operations and the transformation will still be
5959 valid. Also note that we only care about order for the
5960 ANDIF and ORIF operators. If B contains side effects, this
5961 might change the truth-value of A. */
5962 if (TREE_CODE (arg0) == TREE_CODE (arg1)
5963 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
5964 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
5965 || TREE_CODE (arg0) == TRUTH_AND_EXPR
5966 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
5967 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
5968 {
5969 tree a00 = TREE_OPERAND (arg0, 0);
5970 tree a01 = TREE_OPERAND (arg0, 1);
5971 tree a10 = TREE_OPERAND (arg1, 0);
5972 tree a11 = TREE_OPERAND (arg1, 1);
5973 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
5974 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
5975 && (code == TRUTH_AND_EXPR
5976 || code == TRUTH_OR_EXPR));
5977
5978 if (operand_equal_p (a00, a10, 0))
5979 return fold (build (TREE_CODE (arg0), type, a00,
5980 fold (build (code, type, a01, a11))));
5981 else if (commutative && operand_equal_p (a00, a11, 0))
5982 return fold (build (TREE_CODE (arg0), type, a00,
5983 fold (build (code, type, a01, a10))));
5984 else if (commutative && operand_equal_p (a01, a10, 0))
5985 return fold (build (TREE_CODE (arg0), type, a01,
5986 fold (build (code, type, a00, a11))));
5987
5988 /* This case is tricky because we must either have commutative
5989 operators or else A10 must not have side-effects. */
5990
5991 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
5992 && operand_equal_p (a01, a11, 0))
5993 return fold (build (TREE_CODE (arg0), type,
5994 fold (build (code, type, a00, a10)),
5995 a01));
5996 }
5997
5998 /* See if we can build a range comparison. */
5999 if (0 != (tem = fold_range_test (t)))
6000 return tem;
6001
6002 /* Check for the possibility of merging component references. If our
6003 lhs is another similar operation, try to merge its rhs with our
6004 rhs. Then try to merge our lhs and rhs. */
6005 if (TREE_CODE (arg0) == code
6006 && 0 != (tem = fold_truthop (code, type,
6007 TREE_OPERAND (arg0, 1), arg1)))
6008 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6009
6010 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
6011 return tem;
6012
6013 return t;
6014
6015 case TRUTH_ORIF_EXPR:
6016 /* Note that the operands of this must be ints
6017 and their values must be 0 or 1.
6018 ("true" is a fixed value perhaps depending on the language.) */
6019 /* If first arg is constant true, return it. */
6020 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6021 return convert (type, arg0);
6022 case TRUTH_OR_EXPR:
6023 /* If either arg is constant zero, drop it. */
6024 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
6025 return non_lvalue (convert (type, arg1));
6026 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1))
6027 return non_lvalue (convert (type, arg0));
6028 /* If second arg is constant true, result is true, but we must
6029 evaluate first arg. */
6030 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
6031 return omit_one_operand (type, arg1, arg0);
6032 /* Likewise for first arg, but note this only occurs here for
6033 TRUTH_OR_EXPR. */
6034 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6035 return omit_one_operand (type, arg0, arg1);
6036 goto truth_andor;
6037
6038 case TRUTH_XOR_EXPR:
6039 /* If either arg is constant zero, drop it. */
6040 if (integer_zerop (arg0))
6041 return non_lvalue (convert (type, arg1));
6042 if (integer_zerop (arg1))
6043 return non_lvalue (convert (type, arg0));
6044 /* If either arg is constant true, this is a logical inversion. */
6045 if (integer_onep (arg0))
6046 return non_lvalue (convert (type, invert_truthvalue (arg1)));
6047 if (integer_onep (arg1))
6048 return non_lvalue (convert (type, invert_truthvalue (arg0)));
6049 return t;
6050
6051 case EQ_EXPR:
6052 case NE_EXPR:
6053 case LT_EXPR:
6054 case GT_EXPR:
6055 case LE_EXPR:
6056 case GE_EXPR:
6057 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
6058 {
6059 /* (-a) CMP (-b) -> b CMP a */
6060 if (TREE_CODE (arg0) == NEGATE_EXPR
6061 && TREE_CODE (arg1) == NEGATE_EXPR)
6062 return fold (build (code, type, TREE_OPERAND (arg1, 0),
6063 TREE_OPERAND (arg0, 0)));
6064 /* (-a) CMP CST -> a swap(CMP) (-CST) */
6065 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == REAL_CST)
6066 return
6067 fold (build
6068 (swap_tree_comparison (code), type,
6069 TREE_OPERAND (arg0, 0),
6070 build_real (TREE_TYPE (arg1),
6071 REAL_VALUE_NEGATE (TREE_REAL_CST (arg1)))));
6072 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
6073 /* a CMP (-0) -> a CMP 0 */
6074 if (TREE_CODE (arg1) == REAL_CST
6075 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (arg1)))
6076 return fold (build (code, type, arg0,
6077 build_real (TREE_TYPE (arg1), dconst0)));
6078 }
6079
6080
6081 /* If one arg is a constant integer, put it last. */
6082 if (TREE_CODE (arg0) == INTEGER_CST
6083 && TREE_CODE (arg1) != INTEGER_CST)
6084 {
6085 TREE_OPERAND (t, 0) = arg1;
6086 TREE_OPERAND (t, 1) = arg0;
6087 arg0 = TREE_OPERAND (t, 0);
6088 arg1 = TREE_OPERAND (t, 1);
6089 code = swap_tree_comparison (code);
6090 TREE_SET_CODE (t, code);
6091 }
6092
6093 /* Convert foo++ == CONST into ++foo == CONST + INCR.
6094 First, see if one arg is constant; find the constant arg
6095 and the other one. */
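/* For example, i++ == 5 becomes ++i == 6.  */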
6096 {
6097 tree constop = 0, varop = NULL_TREE;
6098 int constopnum = -1;
6099
6100 if (TREE_CONSTANT (arg1))
6101 constopnum = 1, constop = arg1, varop = arg0;
6102 if (TREE_CONSTANT (arg0))
6103 constopnum = 0, constop = arg0, varop = arg1;
6104
6105 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
6106 {
6107 /* This optimization is invalid for ordered comparisons
6108 if CONST+INCR overflows or if foo+incr might overflow.
6109 This optimization is invalid for floating point due to rounding.
6110 For pointer types we assume overflow doesn't happen. */
6111 if (POINTER_TYPE_P (TREE_TYPE (varop))
6112 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
6113 && (code == EQ_EXPR || code == NE_EXPR)))
6114 {
6115 tree newconst
6116 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
6117 constop, TREE_OPERAND (varop, 1)));
6118 TREE_SET_CODE (varop, PREINCREMENT_EXPR);
6119
6120 /* If VAROP is a reference to a bitfield, we must mask
6121 the constant by the width of the field. */
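/* The mask is ~0 shifted right until only the low SIZE bits remain
   set; for a 3-bit field the mask is 7.  */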
6122 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
6123 && DECL_BIT_FIELD(TREE_OPERAND
6124 (TREE_OPERAND (varop, 0), 1)))
6125 {
6126 int size
6127 = TREE_INT_CST_LOW (DECL_SIZE
6128 (TREE_OPERAND
6129 (TREE_OPERAND (varop, 0), 1)));
6130 tree mask, unsigned_type;
6131 unsigned int precision;
6132 tree folded_compare;
6133
6134 /* First check whether the comparison would come out
6135 always the same. If we don't do that we would
6136 change the meaning with the masking. */
6137 if (constopnum == 0)
6138 folded_compare = fold (build (code, type, constop,
6139 TREE_OPERAND (varop, 0)));
6140 else
6141 folded_compare = fold (build (code, type,
6142 TREE_OPERAND (varop, 0),
6143 constop));
6144 if (integer_zerop (folded_compare)
6145 || integer_onep (folded_compare))
6146 return omit_one_operand (type, folded_compare, varop);
6147
6148 unsigned_type = type_for_size (size, 1);
6149 precision = TYPE_PRECISION (unsigned_type);
6150 mask = build_int_2 (~0, ~0);
6151 TREE_TYPE (mask) = unsigned_type;
6152 force_fit_type (mask, 0);
6153 mask = const_binop (RSHIFT_EXPR, mask,
6154 size_int (precision - size), 0);
6155 newconst = fold (build (BIT_AND_EXPR,
6156 TREE_TYPE (varop), newconst,
6157 convert (TREE_TYPE (varop),
6158 mask)));
6159 }
6160
6161
6162 t = build (code, type, TREE_OPERAND (t, 0),
6163 TREE_OPERAND (t, 1));
6164 TREE_OPERAND (t, constopnum) = newconst;
6165 return t;
6166 }
6167 }
6168 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
6169 {
6170 if (POINTER_TYPE_P (TREE_TYPE (varop))
6171 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
6172 && (code == EQ_EXPR || code == NE_EXPR)))
6173 {
6174 tree newconst
6175 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
6176 constop, TREE_OPERAND (varop, 1)));
6177 TREE_SET_CODE (varop, PREDECREMENT_EXPR);
6178
6179 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
6180 && DECL_BIT_FIELD(TREE_OPERAND
6181 (TREE_OPERAND (varop, 0), 1)))
6182 {
6183 int size
6184 = TREE_INT_CST_LOW (DECL_SIZE
6185 (TREE_OPERAND
6186 (TREE_OPERAND (varop, 0), 1)));
6187 tree mask, unsigned_type;
6188 unsigned int precision;
6189 tree folded_compare;
6190
6191 if (constopnum == 0)
6192 folded_compare = fold (build (code, type, constop,
6193 TREE_OPERAND (varop, 0)));
6194 else
6195 folded_compare = fold (build (code, type,
6196 TREE_OPERAND (varop, 0),
6197 constop));
6198 if (integer_zerop (folded_compare)
6199 || integer_onep (folded_compare))
6200 return omit_one_operand (type, folded_compare, varop);
6201
6202 unsigned_type = type_for_size (size, 1);
6203 precision = TYPE_PRECISION (unsigned_type);
6204 mask = build_int_2 (~0, ~0);
6205 TREE_TYPE (mask) = unsigned_type;
6206 force_fit_type (mask, 0);
6207 mask = const_binop (RSHIFT_EXPR, mask,
6208 size_int (precision - size), 0);
6209 newconst = fold (build (BIT_AND_EXPR,
6210 TREE_TYPE (varop), newconst,
6211 convert (TREE_TYPE (varop),
6212 mask)));
6213 }
6214
6215
6216 t = build (code, type, TREE_OPERAND (t, 0),
6217 TREE_OPERAND (t, 1));
6218 TREE_OPERAND (t, constopnum) = newconst;
6219 return t;
6220 }
6221 }
6222 }
6223
6224 /* Change X >= CST to X > (CST - 1), and X < CST to X <= (CST - 1), if CST is positive. */
6225 if (TREE_CODE (arg1) == INTEGER_CST
6226 && TREE_CODE (arg0) != INTEGER_CST
6227 && tree_int_cst_sgn (arg1) > 0)
6228 {
6229 switch (TREE_CODE (t))
6230 {
6231 case GE_EXPR:
6232 code = GT_EXPR;
6233 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6234 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6235 break;
6236
6237 case LT_EXPR:
6238 code = LE_EXPR;
6239 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6240 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6241 break;
6242
6243 default:
6244 break;
6245 }
6246 }
6247
6248 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
6249 a MINUS_EXPR of a constant, we can convert it into a comparison with
6250 a revised constant as long as no overflow occurs. */
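/* For example, x + 3 == 7 becomes x == 4, and x - 3 == 7 becomes
   x == 10.  */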
6251 if ((code == EQ_EXPR || code == NE_EXPR)
6252 && TREE_CODE (arg1) == INTEGER_CST
6253 && (TREE_CODE (arg0) == PLUS_EXPR
6254 || TREE_CODE (arg0) == MINUS_EXPR)
6255 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6256 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
6257 ? MINUS_EXPR : PLUS_EXPR,
6258 arg1, TREE_OPERAND (arg0, 1), 0))
6259 && ! TREE_CONSTANT_OVERFLOW (tem))
6260 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6261
6262 /* Similarly for a NEGATE_EXPR. */
6263 else if ((code == EQ_EXPR || code == NE_EXPR)
6264 && TREE_CODE (arg0) == NEGATE_EXPR
6265 && TREE_CODE (arg1) == INTEGER_CST
6266 && 0 != (tem = negate_expr (arg1))
6267 && TREE_CODE (tem) == INTEGER_CST
6268 && ! TREE_CONSTANT_OVERFLOW (tem))
6269 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6270
6271 /* If we have X - Y == 0, we can convert that to X == Y and similarly
6272 for !=. Don't do this for ordered comparisons due to overflow. */
6273 else if ((code == NE_EXPR || code == EQ_EXPR)
6274 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
6275 return fold (build (code, type,
6276 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
6277
6278 /* If we are widening one operand of an integer comparison,
6279 see if the other operand is similarly being widened. Perhaps we
6280 can do the comparison in the narrower type. */
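/* For example, if c is a char, (int) c == 100 can be done directly
   as c == 100, since 100 fits in char.  */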
6281 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
6282 && TREE_CODE (arg0) == NOP_EXPR
6283 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
6284 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
6285 && (TREE_TYPE (t1) == TREE_TYPE (tem)
6286 || (TREE_CODE (t1) == INTEGER_CST
6287 && int_fits_type_p (t1, TREE_TYPE (tem)))))
6288 return fold (build (code, type, tem, convert (TREE_TYPE (tem), t1)));
6289
6290 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
6291 constant, we can simplify it. */
6292 else if (TREE_CODE (arg1) == INTEGER_CST
6293 && (TREE_CODE (arg0) == MIN_EXPR
6294 || TREE_CODE (arg0) == MAX_EXPR)
6295 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
6296 return optimize_minmax_comparison (t);
6297
6298 /* If we are comparing an ABS_EXPR with a constant, we can
6299 convert all the cases into explicit comparisons, but they may
6300 well not be faster than doing the ABS and one comparison.
6301 But ABS (X) <= C is a range comparison, which becomes a subtraction
6302 and a comparison, and is probably faster. */
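/* For example, ABS (x) <= 5 becomes x >= -5 && x <= 5.  */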
6303 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6304 && TREE_CODE (arg0) == ABS_EXPR
6305 && ! TREE_SIDE_EFFECTS (arg0)
6306 && (0 != (tem = negate_expr (arg1)))
6307 && TREE_CODE (tem) == INTEGER_CST
6308 && ! TREE_CONSTANT_OVERFLOW (tem))
6309 return fold (build (TRUTH_ANDIF_EXPR, type,
6310 build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
6311 build (LE_EXPR, type,
6312 TREE_OPERAND (arg0, 0), arg1)));
6313
6314 /* If this is an EQ or NE comparison with zero and ARG0 is
6315 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
6316 two operations, but the latter can be done in one less insn
6317 on machines that have only two-operand insns or on which a
6318 constant cannot be the first operand. */
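/* For example, ((1 << n) & flags) != 0 becomes
   ((flags >> n) & 1) != 0.  */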
6319 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
6320 && TREE_CODE (arg0) == BIT_AND_EXPR)
6321 {
6322 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
6323 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
6324 return
6325 fold (build (code, type,
6326 build (BIT_AND_EXPR, TREE_TYPE (arg0),
6327 build (RSHIFT_EXPR,
6328 TREE_TYPE (TREE_OPERAND (arg0, 0)),
6329 TREE_OPERAND (arg0, 1),
6330 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
6331 convert (TREE_TYPE (arg0),
6332 integer_one_node)),
6333 arg1));
6334 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
6335 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
6336 return
6337 fold (build (code, type,
6338 build (BIT_AND_EXPR, TREE_TYPE (arg0),
6339 build (RSHIFT_EXPR,
6340 TREE_TYPE (TREE_OPERAND (arg0, 1)),
6341 TREE_OPERAND (arg0, 0),
6342 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
6343 convert (TREE_TYPE (arg0),
6344 integer_one_node)),
6345 arg1));
6346 }
6347
6348 /* If this is an NE or EQ comparison of zero against the result of a
6349 signed MOD operation whose second operand is a power of 2, make
6350 the MOD operation unsigned since it is simpler and equivalent. */
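/* For signed x, x % 4 == 0 tests only the low two bits of x, so the
   modulus can safely be computed in the corresponding unsigned type.  */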
6351 if ((code == NE_EXPR || code == EQ_EXPR)
6352 && integer_zerop (arg1)
6353 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
6354 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
6355 || TREE_CODE (arg0) == CEIL_MOD_EXPR
6356 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
6357 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
6358 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6359 {
6360 tree newtype = unsigned_type (TREE_TYPE (arg0));
6361 tree newmod = build (TREE_CODE (arg0), newtype,
6362 convert (newtype, TREE_OPERAND (arg0, 0)),
6363 convert (newtype, TREE_OPERAND (arg0, 1)));
6364
6365 return build (code, type, newmod, convert (newtype, arg1));
6366 }
6367
6368 /* If this is an NE comparison of zero with an AND of one, remove the
6369 comparison since the AND will give the correct value. */
6370 if (code == NE_EXPR && integer_zerop (arg1)
6371 && TREE_CODE (arg0) == BIT_AND_EXPR
6372 && integer_onep (TREE_OPERAND (arg0, 1)))
6373 return convert (type, arg0);
6374
6375 /* If we have (A & C) == C where C is a power of 2, convert this into
6376 (A & C) != 0. Similarly for NE_EXPR. */
6377 if ((code == EQ_EXPR || code == NE_EXPR)
6378 && TREE_CODE (arg0) == BIT_AND_EXPR
6379 && integer_pow2p (TREE_OPERAND (arg0, 1))
6380 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
6381 return build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
6382 arg0, integer_zero_node);
6383
6384 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
6385 and similarly for >= into !=. */
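/* For example, with unsigned x, x < (1 << n) becomes (x >> n) == 0
   and x >= (1 << n) becomes (x >> n) != 0.  */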
6386 if ((code == LT_EXPR || code == GE_EXPR)
6387 && TREE_UNSIGNED (TREE_TYPE (arg0))
6388 && TREE_CODE (arg1) == LSHIFT_EXPR
6389 && integer_onep (TREE_OPERAND (arg1, 0)))
6390 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
6391 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
6392 TREE_OPERAND (arg1, 1)),
6393 convert (TREE_TYPE (arg0), integer_zero_node));
6394
6395 else if ((code == LT_EXPR || code == GE_EXPR)
6396 && TREE_UNSIGNED (TREE_TYPE (arg0))
6397 && (TREE_CODE (arg1) == NOP_EXPR
6398 || TREE_CODE (arg1) == CONVERT_EXPR)
6399 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
6400 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
6401 return
6402 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
6403 convert (TREE_TYPE (arg0),
6404 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
6405 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
6406 convert (TREE_TYPE (arg0), integer_zero_node));
6407
6408 /* Simplify comparison of something with itself. (For IEEE
6409 floating-point, we can only do some of these simplifications.) */
6410 if (operand_equal_p (arg0, arg1, 0))
6411 {
6412 switch (code)
6413 {
6414 case EQ_EXPR:
6415 case GE_EXPR:
6416 case LE_EXPR:
6417 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6418 return constant_boolean_node (1, type);
6419 code = EQ_EXPR;
6420 TREE_SET_CODE (t, code);
6421 break;
6422
6423 case NE_EXPR:
6424 /* For NE, we can only do this simplification if integer. */
6425 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6426 break;
6427 /* ... fall through ... */
6428 case GT_EXPR:
6429 case LT_EXPR:
6430 return constant_boolean_node (0, type);
6431 default:
6432 abort ();
6433 }
6434 }
6435
6436 /* An unsigned comparison against 0 can be simplified. */
6437 if (integer_zerop (arg1)
6438 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
6439 || POINTER_TYPE_P (TREE_TYPE (arg1)))
6440 && TREE_UNSIGNED (TREE_TYPE (arg1)))
6441 {
6442 switch (TREE_CODE (t))
6443 {
6444 case GT_EXPR:
6445 code = NE_EXPR;
6446 TREE_SET_CODE (t, NE_EXPR);
6447 break;
6448 case LE_EXPR:
6449 code = EQ_EXPR;
6450 TREE_SET_CODE (t, EQ_EXPR);
6451 break;
6452 case GE_EXPR:
6453 return omit_one_operand (type,
6454 convert (type, integer_one_node),
6455 arg0);
6456 case LT_EXPR:
6457 return omit_one_operand (type,
6458 convert (type, integer_zero_node),
6459 arg0);
6460 default:
6461 break;
6462 }
6463 }
6464
6465 /* Comparisons with the highest or lowest possible integer of
6466 the specified size will have known values and an unsigned
6467 <= 0x7fffffff can be simplified. */
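/* For example, with 32-bit ints, unsigned x <= 0x7fffffff becomes
   (int) x >= 0, and unsigned x > 0x7fffffff becomes (int) x < 0.  */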
6468 {
6469 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
6470
6471 if (TREE_CODE (arg1) == INTEGER_CST
6472 && ! TREE_CONSTANT_OVERFLOW (arg1)
6473 && width <= HOST_BITS_PER_WIDE_INT
6474 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
6475 || POINTER_TYPE_P (TREE_TYPE (arg1))))
6476 {
6477 if (TREE_INT_CST_HIGH (arg1) == 0
6478 && (TREE_INT_CST_LOW (arg1)
6479 == ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1)
6480 && ! TREE_UNSIGNED (TREE_TYPE (arg1)))
6481 switch (TREE_CODE (t))
6482 {
6483 case GT_EXPR:
6484 return omit_one_operand (type,
6485 convert (type, integer_zero_node),
6486 arg0);
6487 case GE_EXPR:
6488 TREE_SET_CODE (t, EQ_EXPR);
6489 break;
6490
6491 case LE_EXPR:
6492 return omit_one_operand (type,
6493 convert (type, integer_one_node),
6494 arg0);
6495 case LT_EXPR:
6496 TREE_SET_CODE (t, NE_EXPR);
6497 break;
6498
6499 default:
6500 break;
6501 }
6502
6503 else if (TREE_INT_CST_HIGH (arg1) == -1
6504 && (- TREE_INT_CST_LOW (arg1)
6505 == ((unsigned HOST_WIDE_INT) 1 << (width - 1)))
6506 && ! TREE_UNSIGNED (TREE_TYPE (arg1)))
6507 switch (TREE_CODE (t))
6508 {
6509 case LT_EXPR:
6510 return omit_one_operand (type,
6511 convert (type, integer_zero_node),
6512 arg0);
6513 case LE_EXPR:
6514 TREE_SET_CODE (t, EQ_EXPR);
6515 break;
6516
6517 case GE_EXPR:
6518 return omit_one_operand (type,
6519 convert (type, integer_one_node),
6520 arg0);
6521 case GT_EXPR:
6522 TREE_SET_CODE (t, NE_EXPR);
6523 break;
6524
6525 default:
6526 break;
6527 }
6528
6529 else if (TREE_INT_CST_HIGH (arg1) == 0
6530 && (TREE_INT_CST_LOW (arg1)
6531 == ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1)
6532 && TREE_UNSIGNED (TREE_TYPE (arg1)))
6533
6534 switch (TREE_CODE (t))
6535 {
6536 case LE_EXPR:
6537 return fold (build (GE_EXPR, type,
6538 convert (signed_type (TREE_TYPE (arg0)),
6539 arg0),
6540 convert (signed_type (TREE_TYPE (arg1)),
6541 integer_zero_node)));
6542 case GT_EXPR:
6543 return fold (build (LT_EXPR, type,
6544 convert (signed_type (TREE_TYPE (arg0)),
6545 arg0),
6546 convert (signed_type (TREE_TYPE (arg1)),
6547 integer_zero_node)));
6548
6549 default:
6550 break;
6551 }
6552 }
6553 }
6554
6555 /* If we are comparing an expression that just has comparisons
6556 of two integer values, arithmetic expressions of those comparisons,
6557 and constants, we can simplify it. There are only three cases
6558 to check: the two values can either be equal, the first can be
6559 greater, or the second can be greater. Fold the expression for
6560 those three values. Since each value must be 0 or 1, we have
6561 eight possibilities, each of which corresponds to the constant 0
6562 or 1 or one of the six possible comparisons.
6563
6564 This handles common cases like (a > b) == 0 but also handles
6565 expressions like ((x > y) - (y > x)) > 0, which supposedly
6566 occur in macroized code. */
6567
6568 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
6569 {
6570 tree cval1 = 0, cval2 = 0;
6571 int save_p = 0;
6572
6573 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
6574 /* Don't handle degenerate cases here; they should already
6575 have been handled anyway. */
6576 && cval1 != 0 && cval2 != 0
6577 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
6578 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
6579 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
6580 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
6581 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
6582 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
6583 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
6584 {
6585 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
6586 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
6587
6588 /* We can't just pass T to eval_subst in case cval1 or cval2
6589 was the same as ARG1. */
6590
6591 tree high_result
6592 = fold (build (code, type,
6593 eval_subst (arg0, cval1, maxval, cval2, minval),
6594 arg1));
6595 tree equal_result
6596 = fold (build (code, type,
6597 eval_subst (arg0, cval1, maxval, cval2, maxval),
6598 arg1));
6599 tree low_result
6600 = fold (build (code, type,
6601 eval_subst (arg0, cval1, minval, cval2, maxval),
6602 arg1));
6603
6604 /* All three of these results should be 0 or 1. Confirm they
6605 are. Then use those values to select the proper code
6606 to use. */
6607
6608 if ((integer_zerop (high_result)
6609 || integer_onep (high_result))
6610 && (integer_zerop (equal_result)
6611 || integer_onep (equal_result))
6612 && (integer_zerop (low_result)
6613 || integer_onep (low_result)))
6614 {
6615 /* Make a 3-bit mask with the high-order bit being the
6616 value for `>', the next for `=', and the low for `<'. */
6617 switch ((integer_onep (high_result) * 4)
6618 + (integer_onep (equal_result) * 2)
6619 + integer_onep (low_result))
6620 {
6621 case 0:
6622 /* Always false. */
6623 return omit_one_operand (type, integer_zero_node, arg0);
6624 case 1:
6625 code = LT_EXPR;
6626 break;
6627 case 2:
6628 code = EQ_EXPR;
6629 break;
6630 case 3:
6631 code = LE_EXPR;
6632 break;
6633 case 4:
6634 code = GT_EXPR;
6635 break;
6636 case 5:
6637 code = NE_EXPR;
6638 break;
6639 case 6:
6640 code = GE_EXPR;
6641 break;
6642 case 7:
6643 /* Always true. */
6644 return omit_one_operand (type, integer_one_node, arg0);
6645 }
6646
6647 t = build (code, type, cval1, cval2);
6648 if (save_p)
6649 return save_expr (t);
6650 else
6651 return fold (t);
6652 }
6653 }
6654 }
6655
6656 /* If this is a comparison of a field, we may be able to simplify it. */
6657 if ((TREE_CODE (arg0) == COMPONENT_REF
6658 || TREE_CODE (arg0) == BIT_FIELD_REF)
6659 && (code == EQ_EXPR || code == NE_EXPR)
6660 /* Handle the constant case even without -O
6661 to make sure the warnings are given. */
6662 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
6663 {
6664 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
6665 return t1 ? t1 : t;
6666 }
6667
6668 /* If this is a comparison of complex values and either or both sides
6669 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
6670 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
6671 This may prevent needless evaluations. */
6672 if ((code == EQ_EXPR || code == NE_EXPR)
6673 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
6674 && (TREE_CODE (arg0) == COMPLEX_EXPR
6675 || TREE_CODE (arg1) == COMPLEX_EXPR
6676 || TREE_CODE (arg0) == COMPLEX_CST
6677 || TREE_CODE (arg1) == COMPLEX_CST))
6678 {
6679 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
6680 tree real0, imag0, real1, imag1;
6681
6682 arg0 = save_expr (arg0);
6683 arg1 = save_expr (arg1);
6684 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
6685 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
6686 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
6687 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
6688
6689 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
6690 : TRUTH_ORIF_EXPR),
6691 type,
6692 fold (build (code, type, real0, real1)),
6693 fold (build (code, type, imag0, imag1))));
6694 }
6695
6696 /* From here on, the only cases we handle are when the result is
6697 known to be a constant.
6698
6699 To compute GT, swap the arguments and do LT.
6700 To compute GE, do LT and invert the result.
6701 To compute LE, swap the arguments, do LT and invert the result.
6702 To compute NE, do EQ and invert the result.
6703
6704 Therefore, the code below must handle only EQ and LT. */
6705
6706 if (code == LE_EXPR || code == GT_EXPR)
6707 {
6708 tem = arg0, arg0 = arg1, arg1 = tem;
6709 code = swap_tree_comparison (code);
6710 }
6711
6712 /* Note that it is safe to invert for real values here because we
6713 will check below in the one case that it matters. */
6714
6715 t1 = NULL_TREE;
6716 invert = 0;
6717 if (code == NE_EXPR || code == GE_EXPR)
6718 {
6719 invert = 1;
6720 code = invert_tree_comparison (code);
6721 }
6722
6723 /* Compute a result for LT or EQ if args permit;
6724 otherwise return T. */
6725 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
6726 {
6727 if (code == EQ_EXPR)
6728 t1 = build_int_2 (tree_int_cst_equal (arg0, arg1), 0);
6729 else
6730 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
6731 ? INT_CST_LT_UNSIGNED (arg0, arg1)
6732 : INT_CST_LT (arg0, arg1)),
6733 0);
6734 }
6735
6736 #if 0 /* This is no longer useful, but breaks some real code. */
6737 /* Assume a nonexplicit constant cannot equal an explicit one,
6738 since such code would be undefined anyway.
6739 Exception: on sysvr4, using #pragma weak,
6740 a label can come out as 0. */
6741 else if (TREE_CODE (arg1) == INTEGER_CST
6742 && !integer_zerop (arg1)
6743 && TREE_CONSTANT (arg0)
6744 && TREE_CODE (arg0) == ADDR_EXPR
6745 && code == EQ_EXPR)
6746 t1 = build_int_2 (0, 0);
6747 #endif
6748 /* Two real constants can be compared explicitly. */
6749 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
6750 {
6751 /* If either operand is a NaN, the result is false with two
6752 exceptions: First, an NE_EXPR is true on NaNs, but that case
6753 is already handled correctly since we will be inverting the
6754 result for NE_EXPR. Second, if we had inverted a LE_EXPR
6755 or a GE_EXPR into a LT_EXPR, we must return true so that it
6756 will be inverted into false. */
6757
6758 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
6759 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
6760 t1 = build_int_2 (invert && code == LT_EXPR, 0);
6761
6762 else if (code == EQ_EXPR)
6763 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
6764 TREE_REAL_CST (arg1)),
6765 0);
6766 else
6767 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
6768 TREE_REAL_CST (arg1)),
6769 0);
6770 }
6771
6772 if (t1 == NULL_TREE)
6773 return t;
6774
6775 if (invert)
6776 TREE_INT_CST_LOW (t1) ^= 1;
6777
6778 TREE_TYPE (t1) = type;
6779 if (TREE_CODE (type) == BOOLEAN_TYPE)
6780 return truthvalue_conversion (t1);
6781 return t1;
6782
6783 case COND_EXPR:
6784 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
6785 so all simple results must be passed through pedantic_non_lvalue. */
6786 if (TREE_CODE (arg0) == INTEGER_CST)
6787 return pedantic_non_lvalue
6788 (TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
6789 else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
6790 return pedantic_omit_one_operand (type, arg1, arg0);
6791
6792 /* If the second operand is zero, invert the comparison and swap
6793 the second and third operands. Likewise if the second operand
6794 is constant and the third is not or if the third operand is
6795 equivalent to the first operand of the comparison. */
6796
6797 if (integer_zerop (arg1)
6798 || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
6799 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
6800 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
6801 TREE_OPERAND (t, 2),
6802 TREE_OPERAND (arg0, 1))))
6803 {
6804 /* See if this can be inverted. If it can't, possibly because
6805 it was a floating-point inequality comparison, don't do
6806 anything. */
6807 tem = invert_truthvalue (arg0);
6808
6809 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
6810 {
6811 t = build (code, type, tem,
6812 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
6813 arg0 = tem;
6814 /* arg1 should be the first argument of the new T. */
6815 arg1 = TREE_OPERAND (t, 1);
6816 STRIP_NOPS (arg1);
6817 }
6818 }
6819
6820 /* If we have A op B ? A : C, we may be able to convert this to a
6821 simpler expression, depending on the operation and the values
6822 of B and C. IEEE floating point prevents this though,
6823 because A or B might be -0.0 or a NaN. */
6824
6825 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
6826 && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
6827 || ! FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0)))
6828 || flag_fast_math)
6829 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
6830 arg1, TREE_OPERAND (arg0, 1)))
6831 {
6832 tree arg2 = TREE_OPERAND (t, 2);
6833 enum tree_code comp_code = TREE_CODE (arg0);
6834
6835 STRIP_NOPS (arg2);
6836
6837 /* If we have A op 0 ? A : -A, this is A, -A, abs (A), or abs (-A),
6838 depending on the comparison operation. */
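/* For example, x >= 0 ? x : -x becomes ABS_EXPR <x>, and
   x < 0 ? x : -x becomes -ABS_EXPR <x>.  */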
6839 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
6840 ? real_zerop (TREE_OPERAND (arg0, 1))
6841 : integer_zerop (TREE_OPERAND (arg0, 1)))
6842 && TREE_CODE (arg2) == NEGATE_EXPR
6843 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
6844 switch (comp_code)
6845 {
6846 case EQ_EXPR:
6847 return
6848 pedantic_non_lvalue (convert (type, negate_expr (arg1)));
6849 case NE_EXPR:
6850 return pedantic_non_lvalue (convert (type, arg1));
6851 case GE_EXPR:
6852 case GT_EXPR:
6853 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
6854 arg1 = convert (signed_type (TREE_TYPE (arg1)), arg1);
6855 return pedantic_non_lvalue
6856 (convert (type, fold (build1 (ABS_EXPR,
6857 TREE_TYPE (arg1), arg1))));
6858 case LE_EXPR:
6859 case LT_EXPR:
6860 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
6861 arg1 = convert (signed_type (TREE_TYPE (arg1)), arg1);
6862 return pedantic_non_lvalue
6863 (negate_expr (convert (type,
6864 fold (build1 (ABS_EXPR,
6865 TREE_TYPE (arg1),
6866 arg1)))));
6867 default:
6868 abort ();
6869 }
6870
6871 /* If this is A != 0 ? A : 0, this is simply A. For ==, it is
6872 always zero. */
6873
6874 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
6875 {
6876 if (comp_code == NE_EXPR)
6877 return pedantic_non_lvalue (convert (type, arg1));
6878 else if (comp_code == EQ_EXPR)
6879 return pedantic_non_lvalue (convert (type, integer_zero_node));
6880 }
6881
6882 /* If this is A op B ? A : B, this is either A, B, min (A, B),
6883 or max (A, B), depending on the operation. */
6884
6885 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
6886 arg2, TREE_OPERAND (arg0, 0)))
6887 {
6888 tree comp_op0 = TREE_OPERAND (arg0, 0);
6889 tree comp_op1 = TREE_OPERAND (arg0, 1);
6890 tree comp_type = TREE_TYPE (comp_op0);
6891
6892 switch (comp_code)
6893 {
6894 case EQ_EXPR:
6895 return pedantic_non_lvalue (convert (type, arg2));
6896 case NE_EXPR:
6897 return pedantic_non_lvalue (convert (type, arg1));
6898 case LE_EXPR:
6899 case LT_EXPR:
6900 /* In C++ a ?: expression can be an lvalue, so put the
6901 operand which will be used if they are equal first
6902 so that we can convert this back to the
6903 corresponding COND_EXPR. */
6904 return pedantic_non_lvalue
6905 (convert (type, (fold (build (MIN_EXPR, comp_type,
6906 (comp_code == LE_EXPR
6907 ? comp_op0 : comp_op1),
6908 (comp_code == LE_EXPR
6909 ? comp_op1 : comp_op0))))));
6910 break;
6911 case GE_EXPR:
6912 case GT_EXPR:
6913 return pedantic_non_lvalue
6914 (convert (type, fold (build (MAX_EXPR, comp_type,
6915 (comp_code == GE_EXPR
6916 ? comp_op0 : comp_op1),
6917 (comp_code == GE_EXPR
6918 ? comp_op1 : comp_op0)))));
6919 break;
6920 default:
6921 abort ();
6922 }
6923 }
6924
6925 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
6926 we might still be able to simplify this. For example,
6927 if C1 is one less or one more than C2, this might have started
6928 out as a MIN or MAX and been transformed by this function.
6929 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
6930
6931 if (INTEGRAL_TYPE_P (type)
6932 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6933 && TREE_CODE (arg2) == INTEGER_CST)
6934 switch (comp_code)
6935 {
6936 case EQ_EXPR:
6937 /* We can replace A with C1 in this case. */
6938 arg1 = convert (type, TREE_OPERAND (arg0, 1));
6939 t = build (code, type, TREE_OPERAND (t, 0), arg1,
6940 TREE_OPERAND (t, 2));
6941 break;
6942
6943 case LT_EXPR:
6944 /* If C1 is C2 + 1, this is min(A, C2). */
6945 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
6946 && operand_equal_p (TREE_OPERAND (arg0, 1),
6947 const_binop (PLUS_EXPR, arg2,
6948 integer_one_node, 0), 1))
6949 return pedantic_non_lvalue
6950 (fold (build (MIN_EXPR, type, arg1, arg2)));
6951 break;
6952
6953 case LE_EXPR:
6954 /* If C1 is C2 - 1, this is min(A, C2). */
6955 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
6956 && operand_equal_p (TREE_OPERAND (arg0, 1),
6957 const_binop (MINUS_EXPR, arg2,
6958 integer_one_node, 0), 1))
6959 return pedantic_non_lvalue
6960 (fold (build (MIN_EXPR, type, arg1, arg2)));
6961 break;
6962
6963 case GT_EXPR:
6964 /* If C1 is C2 - 1, this is max(A, C2). */
6965 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
6966 && operand_equal_p (TREE_OPERAND (arg0, 1),
6967 const_binop (MINUS_EXPR, arg2,
6968 integer_one_node, 0), 1))
6969 return pedantic_non_lvalue
6970 (fold (build (MAX_EXPR, type, arg1, arg2)));
6971 break;
6972
6973 case GE_EXPR:
6974 /* If C1 is C2 + 1, this is max(A, C2). */
6975 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
6976 && operand_equal_p (TREE_OPERAND (arg0, 1),
6977 const_binop (PLUS_EXPR, arg2,
6978 integer_one_node, 0), 1))
6979 return pedantic_non_lvalue
6980 (fold (build (MAX_EXPR, type, arg1, arg2)));
6981 break;
6982 case NE_EXPR:
6983 break;
6984 default:
6985 abort ();
6986 }
6987 }
6988
6989 /* If the second operand is simpler than the third, swap them
6990 since that produces better jump optimization results. */
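/* For example, A ? 1 : f (x) becomes (!A) ? f (x) : 1 when the
   inverted condition can be expressed without introducing a
   TRUTH_NOT_EXPR.  */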
6991 if ((TREE_CONSTANT (arg1) || DECL_P (arg1)
6992 || TREE_CODE (arg1) == SAVE_EXPR)
6993 && ! (TREE_CONSTANT (TREE_OPERAND (t, 2))
6994 || DECL_P (TREE_OPERAND (t, 2))
6995 || TREE_CODE (TREE_OPERAND (t, 2)) == SAVE_EXPR))
6996 {
6997 /* See if this can be inverted. If it can't, possibly because
6998 it was a floating-point inequality comparison, don't do
6999 anything. */
7000 tem = invert_truthvalue (arg0);
7001
7002 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
7003 {
7004 t = build (code, type, tem,
7005 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
7006 arg0 = tem;
7007 /* arg1 should be the first argument of the new T. */
7008 arg1 = TREE_OPERAND (t, 1);
7009 STRIP_NOPS (arg1);
7010 }
7011 }
7012
7013 /* Convert A ? 1 : 0 to simply A. */
7014 if (integer_onep (TREE_OPERAND (t, 1))
7015 && integer_zerop (TREE_OPERAND (t, 2))
7016 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
7017 call to fold will try to move the conversion inside
7018 a COND, which will recurse. In that case, the COND_EXPR
7019 is probably the best choice, so leave it alone. */
7020 && type == TREE_TYPE (arg0))
7021 return pedantic_non_lvalue (arg0);
7022
7023 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
7024 operation is simply A & 2. */
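/* This holds because A & 2 can only be 0 or 2: when the != 0 test
   succeeds the value is exactly 2, and when it fails the value is
   0, so (A & 2) itself is already the desired result.  */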
7025
7026 if (integer_zerop (TREE_OPERAND (t, 2))
7027 && TREE_CODE (arg0) == NE_EXPR
7028 && integer_zerop (TREE_OPERAND (arg0, 1))
7029 && integer_pow2p (arg1)
7030 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
7031 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
7032 arg1, 1))
7033 return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));
7034
7035 return t;
7036
7037 case COMPOUND_EXPR:
7038 /* When pedantic, a compound expression can be neither an lvalue
7039 nor an integer constant expression. */
7040 if (TREE_SIDE_EFFECTS (arg0) || pedantic)
7041 return t;
7042 /* Don't let (0, 0) be a null pointer constant. */
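/* A null pointer constant must be a bare integer constant
   expression; wrapping the zero in a NOP_EXPR disqualifies it
   while preserving its value.  */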
7043 if (integer_zerop (arg1))
7044 return build1 (NOP_EXPR, type, arg1);
7045 return convert (type, arg1);
7046
7047 case COMPLEX_EXPR:
7048 if (wins)
7049 return build_complex (type, arg0, arg1);
7050 return t;
7051
7052 case REALPART_EXPR:
7053 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7054 return t;
7055 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7056 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7057 TREE_OPERAND (arg0, 1));
7058 else if (TREE_CODE (arg0) == COMPLEX_CST)
7059 return TREE_REALPART (arg0);
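/* REALPART_EXPR distributes over addition and subtraction:
   realpart (a + b) == realpart (a) + realpart (b); the
   IMAGPART_EXPR case below distributes the same way.  */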
7060 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7061 return fold (build (TREE_CODE (arg0), type,
7062 fold (build1 (REALPART_EXPR, type,
7063 TREE_OPERAND (arg0, 0))),
7064 fold (build1 (REALPART_EXPR,
7065 type, TREE_OPERAND (arg0, 1)))));
7066 return t;
7067
7068 case IMAGPART_EXPR:
7069 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7070 return convert (type, integer_zero_node);
7071 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7072 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7073 TREE_OPERAND (arg0, 0));
7074 else if (TREE_CODE (arg0) == COMPLEX_CST)
7075 return TREE_IMAGPART (arg0);
7076 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7077 return fold (build (TREE_CODE (arg0), type,
7078 fold (build1 (IMAGPART_EXPR, type,
7079 TREE_OPERAND (arg0, 0))),
7080 fold (build1 (IMAGPART_EXPR, type,
7081 TREE_OPERAND (arg0, 1)))));
7082 return t;
7083
7084 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
7085 appropriate. */
7086 case CLEANUP_POINT_EXPR:
7087 if (! has_cleanups (arg0))
7088 return TREE_OPERAND (t, 0);
7089
7090 {
7091 enum tree_code code0 = TREE_CODE (arg0);
7092 int kind0 = TREE_CODE_CLASS (code0);
7093 tree arg00 = TREE_OPERAND (arg0, 0);
7094 tree arg01;
7095
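/* TREE_CODE_CLASS '1' denotes a unary operator, '2' a binary
   operator, and '<' a comparison; in those cases the cleanup
   point can be pushed down to the operands, subject to the
   checks below.  */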
7096 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
7097 return fold (build1 (code0, type,
7098 fold (build1 (CLEANUP_POINT_EXPR,
7099 TREE_TYPE (arg00), arg00))));
7100
7101 if (kind0 == '<' || kind0 == '2'
7102 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
7103 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
7104 || code0 == TRUTH_XOR_EXPR)
7105 {
7106 arg01 = TREE_OPERAND (arg0, 1);
7107
7108 if (TREE_CONSTANT (arg00)
7109 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
7110 && ! has_cleanups (arg00)))
7111 return fold (build (code0, type, arg00,
7112 fold (build1 (CLEANUP_POINT_EXPR,
7113 TREE_TYPE (arg01), arg01))));
7114
7115 if (TREE_CONSTANT (arg01))
7116 return fold (build (code0, type,
7117 fold (build1 (CLEANUP_POINT_EXPR,
7118 TREE_TYPE (arg00), arg00)),
7119 arg01));
7120 }
7121
7122 return t;
7123 }
7124
7125 default:
7126 return t;
7127 } /* switch (code) */
7128 }
7129
7130 /* Determine if the first argument is a multiple of the second argument.
7131 Return 0 if it is not, or if we cannot easily determine that it is.
7132
7133 An example of the sort of thing we care about (at this point; this routine
7134 could surely be made more general, and expanded to do what the *_DIV_EXPR
7135 fold cases do now) is discovering that
7136
7137 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
7138
7139 is a multiple of
7140
7141 SAVE_EXPR (J * 8)
7142
7143 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
7144
7145 This code also handles discovering that
7146
7147 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
7148
7149 is a multiple of 8 so we don't have to worry about dealing with a
7150 possible remainder.
7151
7152 Note that we *look* inside a SAVE_EXPR only to determine how it was
7153 calculated; it is not safe for fold to do much of anything else with the
7154 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
7155 at run time. For example, the latter example above *cannot* be implemented
7156 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
7157 evaluation time of the original SAVE_EXPR is not necessarily the same at
7158 the time the new expression is evaluated. The only optimization of this
7159 sort that would be valid is changing
7160
7161 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
7162
7163 divided by 8 to
7164
7165 SAVE_EXPR (I) * SAVE_EXPR (J)
7166
7167 (where the same SAVE_EXPR (J) is used in the original and the
7168 transformed version). */
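/* For example, given TOP == SAVE_EXPR (I) * SAVE_EXPR (J * 8) and
   BOTTOM == SAVE_EXPR (J * 8), the MULT_EXPR case below recurses on
   the factors and succeeds because operand_equal_p matches BOTTOM
   against the second factor of TOP.  */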
7169
7170 static int
7171 multiple_of_p (type, top, bottom)
7172 tree type;
7173 tree top;
7174 tree bottom;
7175 {
7176 if (operand_equal_p (top, bottom, 0))
7177 return 1;
7178
7179 if (TREE_CODE (type) != INTEGER_TYPE)
7180 return 0;
7181
7182 switch (TREE_CODE (top))
7183 {
7184 case MULT_EXPR:
7185 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
7186 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
7187
7188 case PLUS_EXPR:
7189 case MINUS_EXPR:
7190 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
7191 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
7192
7193 case NOP_EXPR:
7194 /* Can't handle conversions from a non-integral or wider integral type. */
7195 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
7196 || (TYPE_PRECISION (type)
7197 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
7198 return 0;
7199
7200 /* ... fall through ... */
7201
7202 case SAVE_EXPR:
7203 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
7204
7205 case INTEGER_CST:
7206 if ((TREE_CODE (bottom) != INTEGER_CST)
7207 || (tree_int_cst_sgn (top) < 0)
7208 || (tree_int_cst_sgn (bottom) < 0))
7209 return 0;
7210 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
7211 top, bottom, 0));
7212
7213 default:
7214 return 0;
7215 }
7216 }