/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 88, 92-97, 1998 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision etc. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant and prior overflow indicator, and
   forces the value to fit the type.  It returns an overflow indicator.  */
#include "config.h"
#include "system.h"
#include <setjmp.h>
#include "flags.h"
#include "tree.h"

/* Handle floating overflow for `const_binop'.  */
static jmp_buf float_error;

static void encode             PROTO((HOST_WIDE_INT *,
                                      HOST_WIDE_INT, HOST_WIDE_INT));
static void decode             PROTO((HOST_WIDE_INT *,
                                      HOST_WIDE_INT *, HOST_WIDE_INT *));
int div_and_round_double       PROTO((enum tree_code, int, HOST_WIDE_INT,
                                      HOST_WIDE_INT, HOST_WIDE_INT,
                                      HOST_WIDE_INT, HOST_WIDE_INT *,
                                      HOST_WIDE_INT *, HOST_WIDE_INT *,
                                      HOST_WIDE_INT *));
static int split_tree          PROTO((tree, enum tree_code, tree *,
                                      tree *, int *));
static tree int_const_binop    PROTO((enum tree_code, tree, tree, int, int));
static tree const_binop        PROTO((enum tree_code, tree, tree, int));
static tree fold_convert       PROTO((tree, tree));
static enum tree_code invert_tree_comparison PROTO((enum tree_code));
static enum tree_code swap_tree_comparison PROTO((enum tree_code));
static int truth_value_p       PROTO((enum tree_code));
static int operand_equal_for_comparison_p PROTO((tree, tree, tree));
static int twoval_comparison_p PROTO((tree, tree *, tree *, int *));
static tree eval_subst         PROTO((tree, tree, tree, tree, tree));
static tree omit_one_operand   PROTO((tree, tree, tree));
static tree pedantic_omit_one_operand PROTO((tree, tree, tree));
static tree distribute_bit_expr PROTO((enum tree_code, tree, tree, tree));
static tree make_bit_field_ref PROTO((tree, tree, int, int, int));
static tree optimize_bit_field_compare PROTO((enum tree_code, tree,
                                              tree, tree));
static tree decode_field_reference PROTO((tree, int *, int *,
                                          enum machine_mode *, int *,
                                          int *, tree *, tree *));
static int all_ones_mask_p     PROTO((tree, int));
static int simple_operand_p    PROTO((tree));
static tree range_binop        PROTO((enum tree_code, tree, tree, int,
                                      tree, int));
static tree make_range         PROTO((tree, int *, tree *, tree *));
static tree build_range_check  PROTO((tree, tree, int, tree, tree));
static int merge_ranges        PROTO((int *, tree *, tree *, int, tree, tree,
                                      int, tree, tree));
static tree fold_range_test    PROTO((tree));
static tree unextend           PROTO((tree, int, int, tree));
static tree fold_truthop       PROTO((enum tree_code, tree, tree, tree));
static tree strip_compound_expr PROTO((tree, tree));
static int multiple_of_p       PROTO((tree, tree, tree));

#ifndef BRANCH_COST
#define BRANCH_COST 1
#endif

/* Suppose A1 + B1 = SUM1, using 2's complement arithmetic ignoring overflow.
   Suppose A, B and SUM have the same respective signs as A1, B1, and SUM1.
   Then this yields nonzero if overflow occurred during the addition.
   Overflow occurs if A and B have the same sign, but A and SUM differ in sign.
   Use `^' to test whether signs differ, and `< 0' to isolate the sign.  */
#define overflow_sum_sign(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
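
/* Worked example, using 8-bit two's complement values for brevity:
   a = 0x70 (+112), b = 0x20 (+32), sum = 0x90 (-112).
   ~(a ^ b) = 0xAF has the sign bit set (a and b agree in sign), and
   (a ^ sum) = 0xE0 has it set too (a and sum disagree in sign), so
   the AND is 0xA0, which is < 0, and overflow is reported.  With
   b = 0x90 (-112) instead, ~(a ^ b) = 0x1F has a clear sign bit and
   the macro yields 0: operands of differing signs can never overflow.  */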
\f
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT/2 bits stored in each word, as a positive number.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT/2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT/2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT/2)

/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (words, low, hi)
     HOST_WIDE_INT *words;
     HOST_WIDE_INT low, hi;
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (words, low, hi)
     HOST_WIDE_INT *words;
     HOST_WIDE_INT *low, *hi;
{
  *low = words[0] | words[1] * BASE;
  *hi = words[2] | words[3] * BASE;
}
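
/* Illustration, assuming a host where HOST_WIDE_INT is 32 bits (so
   BASE is 1 << 16): encode of low = 0x12345678, hi = 0x9ABCDEF0 stores

     words[0] = 0x5678, words[1] = 0x1234,
     words[2] = 0xDEF0, words[3] = 0x9ABC,

   and decode reverses it exactly.  Each word holds one base-2^16
   digit, so the product of any two digits fits in a single
   HOST_WIDE_INT; mul_double and div_and_round_double below rely
   on that.  */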
\f
/* Make the integer constant T valid for its type
   by setting to 0 or 1 all the bits in the constant
   that don't belong in the type.
   Yield 1 if a signed overflow occurs, 0 otherwise.
   If OVERFLOW is nonzero, a signed overflow has already occurred
   in calculating T, so propagate it.

   Make the real constant T valid for its type by calling CHECK_FLOAT_VALUE,
   if it exists.  */

int
force_fit_type (t, overflow)
     tree t;
     int overflow;
{
  HOST_WIDE_INT low, high;
  register int prec;

  if (TREE_CODE (t) == REAL_CST)
    {
#ifdef CHECK_FLOAT_VALUE
      CHECK_FLOAT_VALUE (TYPE_MODE (TREE_TYPE (t)), TREE_REAL_CST (t),
                         overflow);
#endif
      return overflow;
    }

  else if (TREE_CODE (t) != INTEGER_CST)
    return overflow;

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec == 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      TREE_INT_CST_HIGH (t)
        &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
    }
  else
    {
      TREE_INT_CST_HIGH (t) = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_LOW (t) &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Unsigned types do not suffer sign extension or overflow.  */
  if (TREE_UNSIGNED (TREE_TYPE (t)))
    return overflow;

  /* If the value's sign bit is set, extend the sign.  */
  if (prec != 2 * HOST_BITS_PER_WIDE_INT
      && (prec > HOST_BITS_PER_WIDE_INT
          ? (TREE_INT_CST_HIGH (t)
             & ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
          : TREE_INT_CST_LOW (t) & ((HOST_WIDE_INT) 1 << (prec - 1))))
    {
      /* Value is negative:
         set to 1 all the bits that are outside this type's precision.  */
      if (prec > HOST_BITS_PER_WIDE_INT)
        {
          TREE_INT_CST_HIGH (t)
            |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
        }
      else
        {
          TREE_INT_CST_HIGH (t) = -1;
          if (prec < HOST_BITS_PER_WIDE_INT)
            TREE_INT_CST_LOW (t) |= ((HOST_WIDE_INT) (-1) << prec);
        }
    }

  /* Yield nonzero if signed overflow occurred.  */
  return
    ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
     != 0);
}
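
/* Example: fitting a constant with low = 0x1FF, high = 0 to an 8-bit
   signed type first masks off the bits above bit 7 (leaving 0xFF),
   then, since bit 7 is now set, sign-extends to low = -1, high = -1.
   The stored value differs from the original, so 1 (signed overflow)
   is returned.  For an 8-bit unsigned type the function stops after
   the masking step and merely propagates the incoming OVERFLOW.  */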
\f
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (l1, h1, l2, h2, lv, hv)
     HOST_WIDE_INT l1, h1, l2, h2;
     HOST_WIDE_INT *lv, *hv;
{
  HOST_WIDE_INT l, h;

  l = l1 + l2;
  h = h1 + h2 + ((unsigned HOST_WIDE_INT) l < l1);

  *lv = l;
  *hv = h;
  return overflow_sum_sign (h1, h2, h);
}
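
/* The carry out of the low word is recovered without any wider type:
   l = l1 + l2 wraps modulo 2^HOST_BITS_PER_WIDE_INT in unsigned
   arithmetic, so l < l1 exactly when the addition wrapped.  For
   example, with 32-bit words, 0xFFFFFFFF + 2 gives l = 1, and
   1 < 0xFFFFFFFF supplies the carry of 1 into the high word.  */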

/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (l1, h1, lv, hv)
     HOST_WIDE_INT l1, h1;
     HOST_WIDE_INT *lv, *hv;
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = - l1;
      *hv = ~ h1;
      return 0;
    }
}
\f
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (l1, h1, l2, h2, lv, hv)
     HOST_WIDE_INT l1, h1, l2, h2;
     HOST_WIDE_INT *lv, *hv;
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  register unsigned HOST_WIDE_INT carry;
  register int i, j, k;
  HOST_WIDE_INT toplow, tophigh, neglow, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  bzero ((char *) prod, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);       /* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod+4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
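
/* Example of the overflow test: multiplying -1 (l1 = h1 = -1) by 2
   (l2 = 2, h2 = 0) leaves an unsigned top half of 1 in prod[4..7].
   Since h1 < 0, -(l2,h2) = -2 is added to it, giving toplow =
   tophigh = -1, which is exactly the sign extension of the signed
   result *lv = -2, *hv = -1, so no overflow is reported.  Without
   that correction every product with a negative operand would be
   flagged.  */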
\f
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (l1, h1, count, prec, lv, hv, arith)
     HOST_WIDE_INT l1, h1, count;
     int prec;
     HOST_WIDE_INT *lv, *hv;
     int arith;
{
  if (count < 0)
    {
      rshift_double (l1, h1, - count, prec, lv, hv, arith);
      return;
    }

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = (unsigned HOST_WIDE_INT) l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | ((unsigned HOST_WIDE_INT) l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = (unsigned HOST_WIDE_INT) l1 << count;
    }
}
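
/* The double shift ">> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1"
   above is deliberate: for count == 0 a single shift would be by the
   full word width, which C leaves undefined.  E.g. with 32-bit words
   and count = 0 it evaluates as (l1 >> 31) >> 1, a well-defined total
   shift of 32 that correctly contributes nothing to *hv.  */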

/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (l1, h1, count, prec, lv, hv, arith)
     HOST_WIDE_INT l1, h1, count;
     int prec;
     HOST_WIDE_INT *lv, *hv;
     int arith;
{
  unsigned HOST_WIDE_INT signmask;
  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = signmask;
      *lv = ((signmask << (2 * HOST_BITS_PER_WIDE_INT - count - 1) << 1)
             | ((unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT)));
    }
  else
    {
      *lv = (((unsigned HOST_WIDE_INT) l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
      *hv = ((signmask << (HOST_BITS_PER_WIDE_INT - count))
             | ((unsigned HOST_WIDE_INT) h1 >> count));
    }
}
\f
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (l1, h1, count, prec, lv, hv)
     HOST_WIDE_INT l1, h1, count;
     int prec;
     HOST_WIDE_INT *lv, *hv;
{
  HOST_WIDE_INT s1l, s1h, s2l, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (l1, h1, count, prec, lv, hv)
     HOST_WIDE_INT l1, h1, count;
     int prec;
     HOST_WIDE_INT *lv, *hv;
{
  HOST_WIDE_INT s1l, s1h, s2l, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
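
/* Both rotates are built from two logical shifts over the PREC-bit
   field.  Example with PREC = 64 and 32-bit words: lrotate_double of
   l1 = 1, h1 = 0 by 63 computes (value << 63) | (value >> 1); the
   left shift puts the bit at the top of the high word and the right
   shift contributes nothing, giving *lv = 0, *hv = 1 << 31.  */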
\f
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (code, uns,
                      lnum_orig, hnum_orig, lden_orig, hden_orig,
                      lquo, hquo, lrem, hrem)
     enum tree_code code;
     int uns;
     HOST_WIDE_INT lnum_orig, hnum_orig; /* num == numerator == dividend */
     HOST_WIDE_INT lden_orig, hden_orig; /* den == denominator == divisor */
     HOST_WIDE_INT *lquo, *hquo, *lrem, *hrem;
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];    /* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  register int i, j;
  unsigned HOST_WIDE_INT work;
  register unsigned HOST_WIDE_INT carry = 0;
  HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if ((hden == 0) && (lden == 0))
    overflow = 1, lden = 1;

  /* calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum) && (lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {                          /* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / (unsigned HOST_WIDE_INT) lden;
      goto finish_up;
    }

  if (hnum == 0)
    {                          /* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  bzero ((char *) quo, sizeof quo);

  bzero ((char *) num, sizeof num);    /* to zero the 5th (scaling) element */
  bzero ((char *) den, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / (unsigned HOST_WIDE_INT) lden;
          carry = work % (unsigned HOST_WIDE_INT) lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest non-zero divisor digit.  */
      for (i = 4 - 1; ; i--)
        if (den[i] != 0) {
          den_hi_sig = i;
          break;
        }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1) {         /* scale divisor and dividend */
        carry = 0;
        for (i = 0; i <= 4 - 1; i++) {
          work = (num[i] * scale) + carry;
          num[i] = LOWPART (work);
          carry = HIGHPART (work);
        } num[4] = carry;
        carry = 0;
        for (i = 0; i <= 4 - 1; i++) {
          work = (den[i] * scale) + carry;
          den[i] = LOWPART (work);
          carry = HIGHPART (work);
          if (den[i] != 0) den_hi_sig = i;
        }
      }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--) {
        /* guess the next quotient digit, quo_est, by dividing the first
           two remaining dividend digits by the high order divisor digit.
           quo_est is never low and is at most 2 high.  */
        unsigned HOST_WIDE_INT tmp;

        num_hi_sig = i + den_hi_sig + 1;
        work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
        if (num[num_hi_sig] != den[den_hi_sig])
          quo_est = work / den[den_hi_sig];
        else
          quo_est = BASE - 1;

        /* refine quo_est so it's usually correct, and at most one high.  */
        tmp = work - quo_est * den[den_hi_sig];
        if (tmp < BASE
            && den[den_hi_sig - 1] * quo_est > (tmp * BASE + num[num_hi_sig - 2]))
          quo_est--;

        /* Try QUO_EST as the quotient digit, by multiplying the
           divisor by QUO_EST and subtracting from the remaining dividend.
           Keep in mind that QUO_EST is the I - 1st digit.  */

        carry = 0;
        for (j = 0; j <= den_hi_sig; j++)
          {
            work = quo_est * den[j] + carry;
            carry = HIGHPART (work);
            work = num[i + j] - LOWPART (work);
            num[i + j] = LOWPART (work);
            carry += HIGHPART (work) != 0;
          }

        /* if quo_est was high by one, then num[i] went negative and
           we need to correct things.  */

        if (num[num_hi_sig] < carry)
          {
            quo_est--;
            carry = 0;         /* add divisor back in */
            for (j = 0; j <= den_hi_sig; j++)
              {
                work = num[i + j] + den[j] + carry;
                carry = HIGHPART (work);
                num[i + j] = LOWPART (work);
              }
            num [num_hi_sig] += carry;
          }

        /* store the quotient digit.  */
        quo[i] = quo_est;
      }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* if result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:       /* round toward zero */
    case EXACT_DIV_EXPR:       /* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:       /* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:        /* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:       /* round to closest integer */
      {
        HOST_WIDE_INT labs_rem = *lrem, habs_rem = *hrem;
        HOST_WIDE_INT labs_den = lden, habs_den = hden, ltwice, htwice;

        /* get absolute values */
        if (*hrem < 0) neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0) neg_double (lden, hden, &labs_den, &habs_den);

        /* if (2 * abs (rem) > abs (den)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);
        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && ((unsigned HOST_WIDE_INT) labs_den
                    < (unsigned HOST_WIDE_INT) ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else return overflow;
      }
      break;

    default:
      abort ();
    }

  /* compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
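
/* Example of the rounding adjustments, for -8 divided by 3: the
   truncating quotient is -2 with remainder -2.
     TRUNC_DIV_EXPR yields -2 (remainder -2);
     FLOOR_DIV_EXPR yields -3 (quotient negative, remainder nonzero,
       so 1 is subtracted; the recomputed remainder is 1);
     CEIL_DIV_EXPR yields -2 (no adjustment for a negative quotient);
     ROUND_DIV_EXPR yields -3 (2 * |rem| = 4 > |den| = 3 and quo < 0).
   The final remainder is always recomputed as num - quo * den, so
   quotient and remainder stay consistent in every mode.  */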
\f
#ifndef REAL_ARITHMETIC
/* Effectively truncate a real value to represent the nearest possible value
   in a narrower mode.  The result is actually represented in the same data
   type as the argument, but its value is usually different.

   A trap may occur during the FP operations and it is the responsibility
   of the calling function to have a handler established.  */

REAL_VALUE_TYPE
real_value_truncate (mode, arg)
     enum machine_mode mode;
     REAL_VALUE_TYPE arg;
{
  return REAL_VALUE_TRUNCATE (mode, arg);
}

#if TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT

/* Check for infinity in an IEEE double precision number.  */

int
target_isinf (x)
     REAL_VALUE_TYPE x;
{
  /* The IEEE 64-bit double format.  */
  union {
    REAL_VALUE_TYPE d;
    struct {
      unsigned sign : 1;
      unsigned exponent : 11;
      unsigned mantissa1 : 20;
      unsigned mantissa2;
    } little_endian;
    struct {
      unsigned mantissa2;
      unsigned mantissa1 : 20;
      unsigned exponent : 11;
      unsigned sign : 1;
    } big_endian;
  } u;

  u.d = dconstm1;
  if (u.big_endian.sign == 1)
    {
      u.d = x;
      return (u.big_endian.exponent == 2047
              && u.big_endian.mantissa1 == 0
              && u.big_endian.mantissa2 == 0);
    }
  else
    {
      u.d = x;
      return (u.little_endian.exponent == 2047
              && u.little_endian.mantissa1 == 0
              && u.little_endian.mantissa2 == 0);
    }
}

/* Check whether an IEEE double precision number is a NaN.  */

int
target_isnan (x)
     REAL_VALUE_TYPE x;
{
  /* The IEEE 64-bit double format.  */
  union {
    REAL_VALUE_TYPE d;
    struct {
      unsigned sign : 1;
      unsigned exponent : 11;
      unsigned mantissa1 : 20;
      unsigned mantissa2;
    } little_endian;
    struct {
      unsigned mantissa2;
      unsigned mantissa1 : 20;
      unsigned exponent : 11;
      unsigned sign : 1;
    } big_endian;
  } u;

  u.d = dconstm1;
  if (u.big_endian.sign == 1)
    {
      u.d = x;
      return (u.big_endian.exponent == 2047
              && (u.big_endian.mantissa1 != 0
                  || u.big_endian.mantissa2 != 0));
    }
  else
    {
      u.d = x;
      return (u.little_endian.exponent == 2047
              && (u.little_endian.mantissa1 != 0
                  || u.little_endian.mantissa2 != 0));
    }
}

/* Check for a negative IEEE double precision number.  */

int
target_negative (x)
     REAL_VALUE_TYPE x;
{
  /* The IEEE 64-bit double format.  */
  union {
    REAL_VALUE_TYPE d;
    struct {
      unsigned sign : 1;
      unsigned exponent : 11;
      unsigned mantissa1 : 20;
      unsigned mantissa2;
    } little_endian;
    struct {
      unsigned mantissa2;
      unsigned mantissa1 : 20;
      unsigned exponent : 11;
      unsigned sign : 1;
    } big_endian;
  } u;

  u.d = dconstm1;
  if (u.big_endian.sign == 1)
    {
      u.d = x;
      return u.big_endian.sign;
    }
  else
    {
      u.d = x;
      return u.little_endian.sign;
    }
}
#else /* Target not IEEE */

/* Let's assume other float formats don't have infinity.
   (This can be overridden by redefining REAL_VALUE_ISINF.)  */

target_isinf (x)
     REAL_VALUE_TYPE x;
{
  return 0;
}

/* Let's assume other float formats don't have NaNs.
   (This can be overridden by redefining REAL_VALUE_ISNAN.)  */

target_isnan (x)
     REAL_VALUE_TYPE x;
{
  return 0;
}

/* Let's assume other float formats don't have minus zero.
   (This can be overridden by redefining REAL_VALUE_NEGATIVE.)  */

target_negative (x)
     REAL_VALUE_TYPE x;
{
  return x < 0;
}
#endif /* Target not IEEE */

/* Try to change R into its exact multiplicative inverse in machine mode
   MODE.  Return nonzero function value if successful.  */

int
exact_real_inverse (mode, r)
     enum machine_mode mode;
     REAL_VALUE_TYPE *r;
{
  union
    {
      double d;
      unsigned short i[4];
    } x, t, y;
  int i;

  /* Usually disable if bounds checks are not reliable.  */
  if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT) && !flag_pretend_float)
    return 0;

  /* Set array index to the less significant bits in the unions, depending
     on the endian-ness of the host doubles.
     Disable if insufficient information on the data structure.  */
#if HOST_FLOAT_FORMAT == UNKNOWN_FLOAT_FORMAT
  return 0;
#else
#if HOST_FLOAT_FORMAT == VAX_FLOAT_FORMAT
#define K 2
#else
#if HOST_FLOAT_FORMAT == IBM_FLOAT_FORMAT
#define K 2
#else
#define K (2 * HOST_FLOAT_WORDS_BIG_ENDIAN)
#endif
#endif
#endif

  if (setjmp (float_error))
    {
      /* Don't do the optimization if there was an arithmetic error.  */
 fail:
      set_float_handler (NULL_PTR);
      return 0;
    }
  set_float_handler (float_error);

  /* Domain check the argument.  */
  x.d = *r;
  if (x.d == 0.0)
    goto fail;

#ifdef REAL_INFINITY
  if (REAL_VALUE_ISINF (x.d) || REAL_VALUE_ISNAN (x.d))
    goto fail;
#endif

  /* Compute the reciprocal and check for numerical exactness.
     It is unnecessary to check all the significand bits to determine
     whether X is a power of 2.  If X is not, then it is impossible for
     the bottom half significand of both X and 1/X to be all zero bits.
     Hence we ignore the data structure of the top half and examine only
     the low order bits of the two significands.  */
  t.d = 1.0 / x.d;
  if (x.i[K] != 0 || x.i[K + 1] != 0 || t.i[K] != 0 || t.i[K + 1] != 0)
    goto fail;

  /* Truncate to the required mode and range-check the result.  */
  y.d = REAL_VALUE_TRUNCATE (mode, t.d);
#ifdef CHECK_FLOAT_VALUE
  i = 0;
  if (CHECK_FLOAT_VALUE (mode, y.d, i))
    goto fail;
#endif

  /* Fail if truncation changed the value.  */
  if (y.d != t.d || y.d == 0.0)
    goto fail;

#ifdef REAL_INFINITY
  if (REAL_VALUE_ISINF (y.d) || REAL_VALUE_ISNAN (y.d))
    goto fail;
#endif

  /* Output the reciprocal and return success flag.  */
  set_float_handler (NULL_PTR);
  *r = y.d;
  return 1;
}
#endif /* no REAL_ARITHMETIC */
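
/* Example: for *r = 0.25 the reciprocal 4.0 is a power of two; both
   low-order significand halves are zero, so *r is replaced by 4.0
   and 1 is returned.  For *r = 3.0 the reciprocal 0.333... has
   nonzero low significand bits, so the function returns 0 and leaves
   *r alone; the caller must then use a genuine division instead of
   multiplying by the inverse.  */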
\f
/* Split a tree IN into a constant and a variable part
   that could be combined with CODE to make IN.
   CODE must be a commutative arithmetic operation.
   Store the constant part into *CONP and the variable in *VARP.
   Return 1 if this was done; zero means the tree IN did not decompose
   this way.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.
   Therefore, we must tell the caller whether the variable part
   was subtracted.  We do this by storing 1 or -1 into *VARSIGNP.
   The value stored is the coefficient for the variable term.
   The constant term we return should always be added;
   we negate it if necessary.  */

static int
split_tree (in, code, varp, conp, varsignp)
     tree in;
     enum tree_code code;
     tree *varp, *conp;
     int *varsignp;
{
  register tree outtype = TREE_TYPE (in);
  *varp = 0;
  *conp = 0;

  /* Strip any conversions that don't change the machine mode.  */
  while ((TREE_CODE (in) == NOP_EXPR
          || TREE_CODE (in) == CONVERT_EXPR)
         && (TYPE_MODE (TREE_TYPE (in))
             == TYPE_MODE (TREE_TYPE (TREE_OPERAND (in, 0)))))
    in = TREE_OPERAND (in, 0);

  if (TREE_CODE (in) == code
      || (! FLOAT_TYPE_P (TREE_TYPE (in))
          /* We can associate addition and subtraction together
             (even though the C standard doesn't say so)
             for integers because the value is not affected.
             For reals, the value might be affected, so we can't.  */
          && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
              || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      enum tree_code code = TREE_CODE (TREE_OPERAND (in, 0));
      if (code == INTEGER_CST)
        {
          *conp = TREE_OPERAND (in, 0);
          *varp = TREE_OPERAND (in, 1);
          if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
              && TREE_TYPE (*varp) != outtype)
            *varp = convert (outtype, *varp);
          *varsignp = (TREE_CODE (in) == MINUS_EXPR) ? -1 : 1;
          return 1;
        }
      if (TREE_CONSTANT (TREE_OPERAND (in, 1)))
        {
          *conp = TREE_OPERAND (in, 1);
          *varp = TREE_OPERAND (in, 0);
          *varsignp = 1;
          if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
              && TREE_TYPE (*varp) != outtype)
            *varp = convert (outtype, *varp);
          if (TREE_CODE (in) == MINUS_EXPR)
            {
              /* If operation is subtraction and constant is second,
                 must negate it to get an additive constant.
                 And this cannot be done unless it is a manifest constant.
                 It could also be the address of a static variable.
                 We cannot negate that, so give up.  */
              if (TREE_CODE (*conp) == INTEGER_CST)
                /* Subtracting from integer_zero_node loses for long long.  */
                *conp = fold (build1 (NEGATE_EXPR, TREE_TYPE (*conp), *conp));
              else
                return 0;
            }
          return 1;
        }
      if (TREE_CONSTANT (TREE_OPERAND (in, 0)))
        {
          *conp = TREE_OPERAND (in, 0);
          *varp = TREE_OPERAND (in, 1);
          if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
              && TREE_TYPE (*varp) != outtype)
            *varp = convert (outtype, *varp);
          *varsignp = (TREE_CODE (in) == MINUS_EXPR) ? -1 : 1;
          return 1;
        }
    }
  return 0;
}
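
/* Example: for IN = (x - 5) with CODE = PLUS_EXPR, the constant is
   the second operand of a MINUS_EXPR, so it is negated: *CONP = -5,
   *VARP = x, *VARSIGNP = 1, and the caller may treat the whole tree
   as x + (-5).  For IN = (5 - x) no negation is needed: *CONP = 5,
   *VARP = x, and *VARSIGNP = -1 records that the variable part is
   subtracted.  */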
\f
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.
   If FORSIZE is nonzero, compute overflow for unsigned types.  */

static tree
int_const_binop (code, arg1, arg2, notrunc, forsize)
     enum tree_code code;
     register tree arg1, arg2;
     int notrunc, forsize;
{
  HOST_WIDE_INT int1l, int1h, int2l, int2h;
  HOST_WIDE_INT low, hi;
  HOST_WIDE_INT garbagel, garbageh;
  register tree t;
  int uns = TREE_UNSIGNED (TREE_TYPE (arg1));
  int overflow = 0;
  int no_overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case BIT_ANDTC_EXPR:
      low = int1l & ~int2l, hi = int1h & ~int2h;
      break;

    case RSHIFT_EXPR:
      int2l = - int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l,
                     TYPE_PRECISION (TREE_TYPE (arg1)),
                     &low, &hi,
                     !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l,
                      TYPE_PRECISION (TREE_TYPE (arg1)),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = overflow_sum_sign (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;
          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ... */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ... */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        {
          low = (((unsigned HOST_WIDE_INT) int1h
                  < (unsigned HOST_WIDE_INT) int2h)
                 || (((unsigned HOST_WIDE_INT) int1h
                      == (unsigned HOST_WIDE_INT) int2h)
                     && ((unsigned HOST_WIDE_INT) int1l
                         < (unsigned HOST_WIDE_INT) int2l)));
        }
      else
        {
          low = ((int1h < int2h)
                 || ((int1h == int2h)
                     && ((unsigned HOST_WIDE_INT) int1l
                         < (unsigned HOST_WIDE_INT) int2l)));
        }
      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      abort ();
    }

  if (TREE_TYPE (arg1) == sizetype && hi == 0
      && low >= 0
      && (TYPE_MAX_VALUE (sizetype) == NULL
          || low <= TREE_INT_CST_LOW (TYPE_MAX_VALUE (sizetype)))
      && ! overflow
      && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
    t = size_int (low);
  else
    {
      t = build_int_2 (low, hi);
      TREE_TYPE (t) = TREE_TYPE (arg1);
    }

  TREE_OVERFLOW (t)
    = ((notrunc ? (!uns || forsize) && overflow
        : force_fit_type (t, (!uns || forsize) && overflow) && ! no_overflow)
       | TREE_OVERFLOW (arg1)
       | TREE_OVERFLOW (arg2));
  /* If we're doing a size calculation, unsigned arithmetic does overflow.
     So check if force_fit_type truncated the value.  */
  if (forsize
      && ! TREE_OVERFLOW (t)
      && (TREE_INT_CST_HIGH (t) != hi
          || TREE_INT_CST_LOW (t) != low))
    TREE_OVERFLOW (t) = 1;
  TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
                                | TREE_CONSTANT_OVERFLOW (arg1)
                                | TREE_CONSTANT_OVERFLOW (arg2));
  return t;
}

/* Combine two constants ARG1 and ARG2 under operation CODE
   to produce a new constant.
   We assume ARG1 and ARG2 have the same data type,
   or at least are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (code, arg1, arg2, notrunc)
     enum tree_code code;
     register tree arg1, arg2;
     int notrunc;
{
  STRIP_NOPS (arg1); STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc, 0);

#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
  if (TREE_CODE (arg1) == REAL_CST)
    {
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      int overflow = 0;
      REAL_VALUE_TYPE value;
      tree t;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;
      else if (setjmp (float_error))
        {
          t = copy_node (arg1);
          overflow = 1;
          goto got_float;
        }

      set_float_handler (float_error);

#ifdef REAL_ARITHMETIC
      REAL_ARITHMETIC (value, code, d1, d2);
#else
      switch (code)
        {
        case PLUS_EXPR:
          value = d1 + d2;
          break;

        case MINUS_EXPR:
          value = d1 - d2;
          break;

        case MULT_EXPR:
          value = d1 * d2;
          break;

        case RDIV_EXPR:
#ifndef REAL_INFINITY
          if (d2 == 0)
            abort ();
#endif

          value = d1 / d2;
          break;

        case MIN_EXPR:
          value = MIN (d1, d2);
          break;

        case MAX_EXPR:
          value = MAX (d1, d2);
          break;

        default:
          abort ();
        }
#endif /* no REAL_ARITHMETIC */
      t = build_real (TREE_TYPE (arg1),
                      real_value_truncate (TYPE_MODE (TREE_TYPE (arg1)), value));
    got_float:
      set_float_handler (NULL_PTR);

      TREE_OVERFLOW (t)
        = (force_fit_type (t, overflow)
           | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      register tree type = TREE_TYPE (arg1);
      register tree r1 = TREE_REALPART (arg1);
      register tree i1 = TREE_IMAGPART (arg1);
      register tree r2 = TREE_REALPART (arg2);
      register tree i2 = TREE_IMAGPART (arg2);
      register tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            register tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t = build_complex (type,
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (PLUS_EXPR,
                                             const_binop (MULT_EXPR, r1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, i1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc),
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (MINUS_EXPR,
                                             const_binop (MULT_EXPR, i1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, r1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc));
          }
          break;

        default:
          abort ();
        }
      return t;
    }
  return 0;
}
\f
/* Return an INTEGER_CST with value V.  The type is determined by BIT_P:
   if it is zero, the type is taken from sizetype; if it is one, the type
   is taken from bitsizetype.  */

tree
size_int_wide (number, high, bit_p)
     unsigned HOST_WIDE_INT number, high;
     int bit_p;
{
  register tree t;
  /* Type-size nodes already made for small sizes.  */
  static tree size_table[2*HOST_BITS_PER_WIDE_INT + 1][2];

  if (number < 2*HOST_BITS_PER_WIDE_INT + 1 && ! high
      && size_table[number][bit_p] != 0)
    return size_table[number][bit_p];
  if (number < 2*HOST_BITS_PER_WIDE_INT + 1 && ! high)
    {
      push_obstacks_nochange ();
      /* Make this a permanent node.  */
      end_temporary_allocation ();
      t = build_int_2 (number, 0);
      TREE_TYPE (t) = sizetype_tab[bit_p];
      size_table[number][bit_p] = t;
      pop_obstacks ();
    }
  else
    {
      t = build_int_2 (number, high);
      TREE_TYPE (t) = sizetype_tab[bit_p];
      TREE_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (t) = force_fit_type (t, 0);
    }
  return t;
}

/* Combine operands OP1 and OP2 with arithmetic operation CODE.
   CODE is a tree code.  Data type is taken from `sizetype'.
   If the operands are constant, so is the result.  */

tree
size_binop (code, arg0, arg1)
     enum tree_code code;
     tree arg0, arg1;
{
  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0, 1);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold (build (code, sizetype, arg0, arg1));
}
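
/* A typical use, as it might appear in a front end (an illustrative,
   hypothetical call, not taken from this file):

     offset = size_binop (PLUS_EXPR, offset, size_int (1));

   folds to a single INTEGER_CST in sizetype when OFFSET is constant,
   and otherwise builds (and attempts to fold) a PLUS_EXPR node.  */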
\f
/* Given T, a tree representing type conversion of ARG1, a constant,
   return a constant tree representing the result of conversion.  */

static tree
fold_convert (t, arg1)
     register tree t;
     register tree arg1;
{
  register tree type = TREE_TYPE (t);
  int overflow = 0;

  if (TREE_CODE (type) == POINTER_TYPE || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        {
          /* If we would build a constant wider than GCC supports,
             leave the conversion unfolded.  */
          if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
            return t;

          /* Given an integer constant, make new constant with new type,
             appropriately sign-extended or truncated.  */
          t = build_int_2 (TREE_INT_CST_LOW (arg1),
                           TREE_INT_CST_HIGH (arg1));
          TREE_TYPE (t) = type;
          /* Indicate an overflow if (1) ARG1 already overflowed,
             or (2) force_fit_type indicates an overflow.
             Tell force_fit_type that an overflow has already occurred
             if ARG1 is a too-large unsigned value and T is signed.
             But don't indicate an overflow if converting a pointer.  */
          TREE_OVERFLOW (t)
            = (TREE_OVERFLOW (arg1)
               || (force_fit_type (t,
                                   (TREE_INT_CST_HIGH (arg1) < 0
                                    && (TREE_UNSIGNED (type)
                                        < TREE_UNSIGNED (TREE_TYPE (arg1)))))
                   && TREE_CODE (TREE_TYPE (arg1)) != POINTER_TYPE));
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
        }
#if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
      else if (TREE_CODE (arg1) == REAL_CST)
        {
          /* Don't initialize these, use assignments.
             Initialized local aggregates don't work on old compilers.  */
          REAL_VALUE_TYPE x;
          REAL_VALUE_TYPE l;
          REAL_VALUE_TYPE u;
          tree type1 = TREE_TYPE (arg1);
          int no_upper_bound;

          x = TREE_REAL_CST (arg1);
          l = real_value_from_int_cst (type1, TYPE_MIN_VALUE (type));

          no_upper_bound = (TYPE_MAX_VALUE (type) == NULL);
          if (!no_upper_bound)
            u = real_value_from_int_cst (type1, TYPE_MAX_VALUE (type));

          /* See if X will be in range after truncation towards 0.
             To compensate for truncation, move the bounds away from 0,
             but reject if X exactly equals the adjusted bounds.  */
#ifdef REAL_ARITHMETIC
          REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
          if (!no_upper_bound)
            REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
#else
          l--;
          if (!no_upper_bound)
            u++;
#endif
          /* If X is a NaN, use zero instead and show we have an overflow.
             Otherwise, range check.  */
          if (REAL_VALUE_ISNAN (x))
            overflow = 1, x = dconst0;
          else if (! (REAL_VALUES_LESS (l, x)
                      && !no_upper_bound
                      && REAL_VALUES_LESS (x, u)))
            overflow = 1;

#ifndef REAL_ARITHMETIC
          {
            HOST_WIDE_INT low, high;
            HOST_WIDE_INT half_word
              = (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2);

            if (x < 0)
              x = -x;

            high = (HOST_WIDE_INT) (x / half_word / half_word);
            x -= (REAL_VALUE_TYPE) high * half_word * half_word;
            if (x >= (REAL_VALUE_TYPE) half_word * half_word / 2)
              {
                low = x - (REAL_VALUE_TYPE) half_word * half_word / 2;
                low |= (HOST_WIDE_INT) -1 << (HOST_BITS_PER_WIDE_INT - 1);
              }
            else
              low = (HOST_WIDE_INT) x;
            if (TREE_REAL_CST (arg1) < 0)
              neg_double (low, high, &low, &high);
            t = build_int_2 (low, high);
          }
#else
          {
            HOST_WIDE_INT low, high;
            REAL_VALUE_TO_INT (&low, &high, x);
            t = build_int_2 (low, high);
          }
#endif
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t)
            = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
        }
#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
      TREE_TYPE (t) = type;
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
#if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
      if (TREE_CODE (arg1) == REAL_CST)
        {
          if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
            {
              t = arg1;
              TREE_TYPE (arg1) = type;
              return t;
            }
          else if (setjmp (float_error))
            {
              overflow = 1;
              t = copy_node (arg1);
              goto got_it;
            }
          set_float_handler (float_error);

          t = build_real (type, real_value_truncate (TYPE_MODE (type),
                                                     TREE_REAL_CST (arg1)));
          set_float_handler (NULL_PTR);

        got_it:
          TREE_OVERFLOW (t)
            = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
    }
  TREE_CONSTANT (t) = 1;
  return t;
}
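
/* Example: converting the INTEGER_CST 300 to an 8-bit signed type
   copies the value into a new node; force_fit_type truncates it to
   44 (300 mod 256) and reports the change, so TREE_OVERFLOW is set
   on the result.  Converting the REAL_CST 3.9 to an integer type
   truncates toward zero to 3; the range check above widens each
   bound by one (rejecting exact equality) so that values which only
   land in range after truncation are still accepted.  */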
\f
/* Return an expr equal to X but certainly not valid as an lvalue.
   Also make sure it is not valid as a null pointer constant.  */

tree
non_lvalue (x)
     tree x;
{
  tree result;

  /* These things are certainly not lvalues.  */
  if (TREE_CODE (x) == NON_LVALUE_EXPR
      || TREE_CODE (x) == INTEGER_CST
      || TREE_CODE (x) == REAL_CST
      || TREE_CODE (x) == STRING_CST
      || TREE_CODE (x) == ADDR_EXPR)
    {
      if (TREE_CODE (x) == INTEGER_CST && integer_zerop (x))
        {
          /* Use NOP_EXPR instead of NON_LVALUE_EXPR
             so convert_for_assignment won't strip it.
             This is so this 0 won't be treated as a null pointer constant.  */
          result = build1 (NOP_EXPR, TREE_TYPE (x), x);
          TREE_CONSTANT (result) = TREE_CONSTANT (x);
          return result;
        }
      return x;
    }

  result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
  TREE_CONSTANT (result) = TREE_CONSTANT (x);
  return result;
}

/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

tree
pedantic_non_lvalue (x)
     tree x;
{
  if (pedantic_lvalues)
    return non_lvalue (x);
  else
    return x;
}
\f
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR.  */

static enum tree_code
invert_tree_comparison (code)
     enum tree_code code;
{
  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return LE_EXPR;
    case GE_EXPR:
      return LT_EXPR;
    case LT_EXPR:
      return GE_EXPR;
    case LE_EXPR:
      return GT_EXPR;
    default:
      abort ();
    }
}

/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

static enum tree_code
swap_tree_comparison (code)
     enum tree_code code;
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    default:
      abort ();
    }
}
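
/* The distinction between the two transformations matters for IEEE
   floating point: with a NaN operand both a < b and a >= b are false,
   so rewriting !(a < b) as a >= b (invert_tree_comparison) would be
   wrong, while rewriting a < b as b > a (swap_tree_comparison) is
   always safe.  */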

/* Return nonzero if CODE is a tree code that represents a truth value.  */

static int
truth_value_p (code)
     enum tree_code code;
{
  return (TREE_CODE_CLASS (code) == '<'
          || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
          || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
          || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
}
\f
/* Return nonzero if two operands are necessarily equal.
   If ONLY_CONST is non-zero, only return non-zero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.  */

int
operand_equal_p (arg0, arg1, only_const)
     tree arg0, arg1;
     int only_const;
{
  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  */
  if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
         Might as well play it safe and always test this.  */
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! only_const
      && (TREE_CODE (arg0) == SAVE_EXPR
          || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
        return (! TREE_CONSTANT_OVERFLOW (arg0)
                && ! TREE_CONSTANT_OVERFLOW (arg1)
                && TREE_INT_CST_LOW (arg0) == TREE_INT_CST_LOW (arg1)
                && TREE_INT_CST_HIGH (arg0) == TREE_INT_CST_HIGH (arg1));

      case REAL_CST:
        return (! TREE_CONSTANT_OVERFLOW (arg0)
                && ! TREE_CONSTANT_OVERFLOW (arg1)
                && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
                                          TREE_REAL_CST (arg1)));

      case COMPLEX_CST:
        return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
                                 only_const)
                && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
                                    only_const));

      case STRING_CST:
        return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
                && ! strncmp (TREE_STRING_POINTER (arg0),
                              TREE_STRING_POINTER (arg1),
                              TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
        return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
                                0);
      default:
        break;
      }

  if (only_const)
    return 0;

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case '1':
      /* Two conversions are equal only if signedness and modes match.  */
      if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
          && (TREE_UNSIGNED (TREE_TYPE (arg0))
              != TREE_UNSIGNED (TREE_TYPE (arg1))))
        return 0;

      return operand_equal_p (TREE_OPERAND (arg0, 0),
                              TREE_OPERAND (arg1, 0), 0);

    case '<':
    case '2':
      if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
          && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
                              0))
        return 1;

      /* For commutative ops, allow the other order.  */
      return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
               || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
               || TREE_CODE (arg0) == BIT_IOR_EXPR
               || TREE_CODE (arg0) == BIT_XOR_EXPR
               || TREE_CODE (arg0) == BIT_AND_EXPR
               || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 1), 0)
              && operand_equal_p (TREE_OPERAND (arg0, 1),
                                  TREE_OPERAND (arg1, 0), 0));

    case 'r':
      switch (TREE_CODE (arg0))
        {
        case INDIRECT_REF:
          return operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 0), 0);

        case COMPONENT_REF:
        case ARRAY_REF:
          return (operand_equal_p (TREE_OPERAND (arg0, 0),
                                   TREE_OPERAND (arg1, 0), 0)
                  && operand_equal_p (TREE_OPERAND (arg0, 1),
                                      TREE_OPERAND (arg1, 1), 0));

        case BIT_FIELD_REF:
          return (operand_equal_p (TREE_OPERAND (arg0, 0),
                                   TREE_OPERAND (arg1, 0), 0)
                  && operand_equal_p (TREE_OPERAND (arg0, 1),
                                      TREE_OPERAND (arg1, 1), 0)
                  && operand_equal_p (TREE_OPERAND (arg0, 2),
                                      TREE_OPERAND (arg1, 2), 0));
        default:
          return 0;
        }

    default:
      return 0;
    }
}
\f
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (arg0, arg1, other)
     tree arg0, arg1;
     tree other;
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0); STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
         to match the longer operand.  */
      primarg1 = convert (signed_or_unsigned_type (unsignedp1,
                                                   TREE_TYPE (primarg1)),
                          primarg1);

      if (operand_equal_p (arg0, convert (type, primarg1), 0))
        return 1;
    }

  return 0;
}
1958 \f
1959 /* See if ARG is an expression that is either a comparison or is performing
1960 arithmetic on comparisons. The comparisons must only be comparing
1961 two different values, which will be stored in *CVAL1 and *CVAL2; if
1962 they are non-zero it means that some operands have already been found.
1963 No variables may be used anywhere else in the expression except in the
1964 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
1965 the expression and save_expr needs to be called with CVAL1 and CVAL2.
1966
1967 If this is true, return 1. Otherwise, return zero. */
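
/* For example, (a < b) | (a == b) qualifies: both comparisons test only
   A against B, so *CVAL1 becomes A and *CVAL2 becomes B.  By contrast,
   (a < b) | (c < d) fails because four values would be needed, and
   (a < b) + a fails because A is used outside of a comparison.  */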
1968
1969 static int
1970 twoval_comparison_p (arg, cval1, cval2, save_p)
1971 tree arg;
1972 tree *cval1, *cval2;
1973 int *save_p;
1974 {
1975 enum tree_code code = TREE_CODE (arg);
1976 char class = TREE_CODE_CLASS (code);
1977
1978 /* We can handle some of the 'e' cases here. */
1979 if (class == 'e' && code == TRUTH_NOT_EXPR)
1980 class = '1';
1981 else if (class == 'e'
1982 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
1983 || code == COMPOUND_EXPR))
1984 class = '2';
1985
1986 /* ??? Disable this since the SAVE_EXPR might already be in use outside
1987 the expression. There may be no way to make this work, but it needs
1988 to be looked at again for 2.6. */
1989 #if 0
1990 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0)
1991 {
1992 /* If we've already found a CVAL1 or CVAL2, this expression is
1993 too complex to handle. */
1994 if (*cval1 || *cval2)
1995 return 0;
1996
1997 class = '1';
1998 *save_p = 1;
1999 }
2000 #endif
2001
2002 switch (class)
2003 {
2004 case '1':
2005 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2006
2007 case '2':
2008 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2009 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2010 cval1, cval2, save_p));
2011
2012 case 'c':
2013 return 1;
2014
2015 case 'e':
2016 if (code == COND_EXPR)
2017 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2018 cval1, cval2, save_p)
2019 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2020 cval1, cval2, save_p)
2021 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2022 cval1, cval2, save_p));
2023 return 0;
2024
2025 case '<':
2026 /* First see if we can handle the first operand, then the second. For
2027 the second operand, we know *CVAL1 can't be zero. It must be that
2028 one side of the comparison is each of the values; test for the
2029 case where this isn't true by failing if the two operands
2030 are the same. */
2031
2032 if (operand_equal_p (TREE_OPERAND (arg, 0),
2033 TREE_OPERAND (arg, 1), 0))
2034 return 0;
2035
2036 if (*cval1 == 0)
2037 *cval1 = TREE_OPERAND (arg, 0);
2038 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2039 ;
2040 else if (*cval2 == 0)
2041 *cval2 = TREE_OPERAND (arg, 0);
2042 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2043 ;
2044 else
2045 return 0;
2046
2047 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2048 ;
2049 else if (*cval2 == 0)
2050 *cval2 = TREE_OPERAND (arg, 1);
2051 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2052 ;
2053 else
2054 return 0;
2055
2056 return 1;
2057
2058 default:
2059 return 0;
2060 }
2061 }
2062 \f
2063 /* ARG is a tree that is known to contain just arithmetic operations and
2064 comparisons. Evaluate the operations in the tree substituting NEW0 for
2065 any occurrence of OLD0 as an operand of a comparison and likewise for
2066 NEW1 and OLD1. */
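
/* For example, eval_subst applied to (a < b ? a == b : a > b) with
   OLD0 = A, NEW0 = X, OLD1 = B, NEW1 = Y folds and returns
   (x < y ? x == y : x > y); the substitution happens only at the
   operands of the comparisons.  */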
2067
2068 static tree
2069 eval_subst (arg, old0, new0, old1, new1)
2070 tree arg;
2071 tree old0, new0, old1, new1;
2072 {
2073 tree type = TREE_TYPE (arg);
2074 enum tree_code code = TREE_CODE (arg);
2075 char class = TREE_CODE_CLASS (code);
2076
2077 /* We can handle some of the 'e' cases here. */
2078 if (class == 'e' && code == TRUTH_NOT_EXPR)
2079 class = '1';
2080 else if (class == 'e'
2081 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2082 class = '2';
2083
2084 switch (class)
2085 {
2086 case '1':
2087 return fold (build1 (code, type,
2088 eval_subst (TREE_OPERAND (arg, 0),
2089 old0, new0, old1, new1)));
2090
2091 case '2':
2092 return fold (build (code, type,
2093 eval_subst (TREE_OPERAND (arg, 0),
2094 old0, new0, old1, new1),
2095 eval_subst (TREE_OPERAND (arg, 1),
2096 old0, new0, old1, new1)));
2097
2098 case 'e':
2099 switch (code)
2100 {
2101 case SAVE_EXPR:
2102 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2103
2104 case COMPOUND_EXPR:
2105 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2106
2107 case COND_EXPR:
2108 return fold (build (code, type,
2109 eval_subst (TREE_OPERAND (arg, 0),
2110 old0, new0, old1, new1),
2111 eval_subst (TREE_OPERAND (arg, 1),
2112 old0, new0, old1, new1),
2113 eval_subst (TREE_OPERAND (arg, 2),
2114 old0, new0, old1, new1)));
2115 default:
2116 break;
2117 }
2118 /* fall through (???) */
2119
2120 case '<':
2121 {
2122 tree arg0 = TREE_OPERAND (arg, 0);
2123 tree arg1 = TREE_OPERAND (arg, 1);
2124
2125 /* We need to check both for exact equality and tree equality. The
2126 former will be true if the operand has a side-effect. In that
2127 case, we know the operand occurred exactly once. */
2128
2129 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2130 arg0 = new0;
2131 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2132 arg0 = new1;
2133
2134 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2135 arg1 = new0;
2136 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2137 arg1 = new1;
2138
2139 return fold (build (code, type, arg0, arg1));
2140 }
2141
2142 default:
2143 return arg;
2144 }
2145 }
2146 \f
2147 /* Return a tree for the case when the result of an expression is RESULT
2148 converted to TYPE and OMITTED was previously an operand of the expression
2149 but is now not needed (e.g., we folded OMITTED * 0).
2150
2151 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2152 the conversion of RESULT to TYPE. */
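
/* For example, when fold simplifies f () * 0 to 0, the call f () has
   side effects and cannot simply be dropped, so the result is the
   COMPOUND_EXPR (f (), 0), which still evaluates the call.  */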
2153
2154 static tree
2155 omit_one_operand (type, result, omitted)
2156 tree type, result, omitted;
2157 {
2158 tree t = convert (type, result);
2159
2160 if (TREE_SIDE_EFFECTS (omitted))
2161 return build (COMPOUND_EXPR, type, omitted, t);
2162
2163 return non_lvalue (t);
2164 }
2165
2166 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2167
2168 static tree
2169 pedantic_omit_one_operand (type, result, omitted)
2170 tree type, result, omitted;
2171 {
2172 tree t = convert (type, result);
2173
2174 if (TREE_SIDE_EFFECTS (omitted))
2175 return build (COMPOUND_EXPR, type, omitted, t);
2176
2177 return pedantic_non_lvalue (t);
2178 }
2179
2180
2181 \f
2182 /* Return a simplified tree node for the truth-negation of ARG. This
2183 never alters ARG itself. We assume that ARG is an operation that
2184 returns a truth value (0 or 1). */
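
/* For example, inverting (a && b) yields (!a || !b) by De Morgan's
   laws, and inverting (a < b) yields (a >= b), except when the operands
   are floating-point: with IEEE NaNs, !(a < b) is not equivalent to
   (a >= b), so such comparisons are merely wrapped in a
   TRUTH_NOT_EXPR.  */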
2185
2186 tree
2187 invert_truthvalue (arg)
2188 tree arg;
2189 {
2190 tree type = TREE_TYPE (arg);
2191 enum tree_code code = TREE_CODE (arg);
2192
2193 if (code == ERROR_MARK)
2194 return arg;
2195
2196 /* If this is a comparison, we can simply invert it, except for
2197 floating-point non-equality comparisons, in which case we just
2198 enclose a TRUTH_NOT_EXPR around what we have. */
2199
2200 if (TREE_CODE_CLASS (code) == '<')
2201 {
2202 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2203 && code != NE_EXPR && code != EQ_EXPR)
2204 return build1 (TRUTH_NOT_EXPR, type, arg);
2205 else
2206 return build (invert_tree_comparison (code), type,
2207 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2208 }
2209
2210 switch (code)
2211 {
2212 case INTEGER_CST:
2213 return convert (type, build_int_2 (TREE_INT_CST_LOW (arg) == 0
2214 && TREE_INT_CST_HIGH (arg) == 0, 0));
2215
2216 case TRUTH_AND_EXPR:
2217 return build (TRUTH_OR_EXPR, type,
2218 invert_truthvalue (TREE_OPERAND (arg, 0)),
2219 invert_truthvalue (TREE_OPERAND (arg, 1)));
2220
2221 case TRUTH_OR_EXPR:
2222 return build (TRUTH_AND_EXPR, type,
2223 invert_truthvalue (TREE_OPERAND (arg, 0)),
2224 invert_truthvalue (TREE_OPERAND (arg, 1)));
2225
2226 case TRUTH_XOR_EXPR:
2227 /* Here we can invert either operand. We invert the first operand
2228 unless the second operand is a TRUTH_NOT_EXPR in which case our
2229 result is the XOR of the first operand with the inside of the
2230 negation of the second operand. */
2231
2232 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2233 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2234 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2235 else
2236 return build (TRUTH_XOR_EXPR, type,
2237 invert_truthvalue (TREE_OPERAND (arg, 0)),
2238 TREE_OPERAND (arg, 1));
2239
2240 case TRUTH_ANDIF_EXPR:
2241 return build (TRUTH_ORIF_EXPR, type,
2242 invert_truthvalue (TREE_OPERAND (arg, 0)),
2243 invert_truthvalue (TREE_OPERAND (arg, 1)));
2244
2245 case TRUTH_ORIF_EXPR:
2246 return build (TRUTH_ANDIF_EXPR, type,
2247 invert_truthvalue (TREE_OPERAND (arg, 0)),
2248 invert_truthvalue (TREE_OPERAND (arg, 1)));
2249
2250 case TRUTH_NOT_EXPR:
2251 return TREE_OPERAND (arg, 0);
2252
2253 case COND_EXPR:
2254 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2255 invert_truthvalue (TREE_OPERAND (arg, 1)),
2256 invert_truthvalue (TREE_OPERAND (arg, 2)));
2257
2258 case COMPOUND_EXPR:
2259 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2260 invert_truthvalue (TREE_OPERAND (arg, 1)));
2261
2262 case NON_LVALUE_EXPR:
2263 return invert_truthvalue (TREE_OPERAND (arg, 0));
2264
2265 case NOP_EXPR:
2266 case CONVERT_EXPR:
2267 case FLOAT_EXPR:
2268 return build1 (TREE_CODE (arg), type,
2269 invert_truthvalue (TREE_OPERAND (arg, 0)));
2270
2271 case BIT_AND_EXPR:
2272 if (!integer_onep (TREE_OPERAND (arg, 1)))
2273 break;
2274 return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));
2275
2276 case SAVE_EXPR:
2277 return build1 (TRUTH_NOT_EXPR, type, arg);
2278
2279 case CLEANUP_POINT_EXPR:
2280 return build1 (CLEANUP_POINT_EXPR, type,
2281 invert_truthvalue (TREE_OPERAND (arg, 0)));
2282
2283 default:
2284 break;
2285 }
2286 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2287 abort ();
2288 return build1 (TRUTH_NOT_EXPR, type, arg);
2289 }
2290
2291 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2292 operands are another bit-wise operation with a common input. If so,
2293 distribute the bit operations to save an operation and possibly two if
2294 constants are involved. For example, convert
2295 (A | B) & (A | C) into A | (B & C)
2296 Further simplification will occur if B and C are constants.
2297
2298 If this optimization cannot be done, 0 will be returned. */
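
/* The dual transformation is also performed: (A & B) | (A & C) becomes
   A & (B | C), since any inner code of BIT_AND_EXPR or BIT_IOR_EXPR is
   accepted as long as it differs from CODE.  */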
2299
2300 static tree
2301 distribute_bit_expr (code, type, arg0, arg1)
2302 enum tree_code code;
2303 tree type;
2304 tree arg0, arg1;
2305 {
2306 tree common;
2307 tree left, right;
2308
2309 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2310 || TREE_CODE (arg0) == code
2311 || (TREE_CODE (arg0) != BIT_AND_EXPR
2312 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2313 return 0;
2314
2315 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2316 {
2317 common = TREE_OPERAND (arg0, 0);
2318 left = TREE_OPERAND (arg0, 1);
2319 right = TREE_OPERAND (arg1, 1);
2320 }
2321 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2322 {
2323 common = TREE_OPERAND (arg0, 0);
2324 left = TREE_OPERAND (arg0, 1);
2325 right = TREE_OPERAND (arg1, 0);
2326 }
2327 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2328 {
2329 common = TREE_OPERAND (arg0, 1);
2330 left = TREE_OPERAND (arg0, 0);
2331 right = TREE_OPERAND (arg1, 1);
2332 }
2333 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2334 {
2335 common = TREE_OPERAND (arg0, 1);
2336 left = TREE_OPERAND (arg0, 0);
2337 right = TREE_OPERAND (arg1, 0);
2338 }
2339 else
2340 return 0;
2341
2342 return fold (build (TREE_CODE (arg0), type, common,
2343 fold (build (code, type, left, right))));
2344 }
2345 \f
2346 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2347 starting at BITPOS. The field is unsigned if UNSIGNEDP is non-zero. */
2348
2349 static tree
2350 make_bit_field_ref (inner, type, bitsize, bitpos, unsignedp)
2351 tree inner;
2352 tree type;
2353 int bitsize, bitpos;
2354 int unsignedp;
2355 {
2356 tree result = build (BIT_FIELD_REF, type, inner,
2357 size_int (bitsize), bitsize_int (bitpos, 0L));
2358
2359 TREE_UNSIGNED (result) = unsignedp;
2360
2361 return result;
2362 }
2363
2364 /* Optimize a bit-field compare.
2365
2366 There are two cases: First is a compare against a constant and the
2367 second is a comparison of two items where the fields are at the same
2368 bit position relative to the start of a chunk (byte, halfword, word)
2369 large enough to contain it. In these cases we can avoid the shift
2370 implicit in bitfield extractions.
2371
2372 For constants, we emit a compare of the shifted constant with the
2373 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2374 compared. For two fields at the same position, we do the ANDs with the
2375 similar mask and compare the result of the ANDs.
2376
2377 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2378 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2379 are the left and right operands of the comparison, respectively.
2380
2381 If the optimization described above can be done, we return the resulting
2382 tree. Otherwise we return zero. */
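
/* As a hedged sketch (assuming an 8-bit byte, a little-endian bit
   layout, and the hypothetical fields below), given

	struct s { unsigned int a : 3; unsigned int b : 5; } x;

   the test x.b == 7 becomes roughly

	(BYTE_CONTAINING (x.b) & 0xf8) == (7 << 3)

   i.e. a mask and compare of the containing byte, with no shift of the
   extracted field.  */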
2383
2384 static tree
2385 optimize_bit_field_compare (code, compare_type, lhs, rhs)
2386 enum tree_code code;
2387 tree compare_type;
2388 tree lhs, rhs;
2389 {
2390 int lbitpos, lbitsize, rbitpos, rbitsize;
2391 int lnbitpos, lnbitsize, rnbitpos = 0, rnbitsize = 0;
2392 tree type = TREE_TYPE (lhs);
2393 tree signed_type, unsigned_type;
2394 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2395 enum machine_mode lmode, rmode, lnmode, rnmode = VOIDmode;
2396 int lunsignedp, runsignedp;
2397 int lvolatilep = 0, rvolatilep = 0;
2398 int alignment;
2399 tree linner, rinner = NULL_TREE;
2400 tree mask;
2401 tree offset;
2402
2403 /* Get all the information about the extractions being done. If the bit size
2404 is the same as the size of the underlying object, we aren't doing an
2405 extraction at all and so can do nothing. */
2406 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2407 &lunsignedp, &lvolatilep, &alignment);
2408 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2409 || offset != 0)
2410 return 0;
2411
2412 if (!const_p)
2413 {
2414 /* If this is not a constant, we can only do something if bit positions,
2415 sizes, and signedness are the same. */
2416 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
2417 &runsignedp, &rvolatilep, &alignment);
2418
2419 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2420 || lunsignedp != runsignedp || offset != 0)
2421 return 0;
2422 }
2423
2424 /* See if we can find a mode to refer to this field. We should be able to,
2425 but fail if we can't. */
2426 lnmode = get_best_mode (lbitsize, lbitpos,
2427 TYPE_ALIGN (TREE_TYPE (linner)), word_mode,
2428 lvolatilep);
2429 if (lnmode == VOIDmode)
2430 return 0;
2431
2432 /* Set signed and unsigned types of the precision of this mode for the
2433 shifts below. */
2434 signed_type = type_for_mode (lnmode, 0);
2435 unsigned_type = type_for_mode (lnmode, 1);
2436
2437 if (! const_p)
2438 {
2439 rnmode = get_best_mode (rbitsize, rbitpos,
2440 TYPE_ALIGN (TREE_TYPE (rinner)), word_mode,
2441 rvolatilep);
2442 if (rnmode == VOIDmode)
2443 return 0;
2444 }
2445
2446 /* Compute the bit position and size for the new reference and our offset
2447 within it. If the new reference is the same size as the original, we
2448 won't optimize anything, so return zero. */
2449 lnbitsize = GET_MODE_BITSIZE (lnmode);
2450 lnbitpos = lbitpos & ~ (lnbitsize - 1);
2451 lbitpos -= lnbitpos;
2452 if (lnbitsize == lbitsize)
2453 return 0;
2454
2455 if (! const_p)
2456 {
2457 rnbitsize = GET_MODE_BITSIZE (rnmode);
2458 rnbitpos = rbitpos & ~ (rnbitsize - 1);
2459 rbitpos -= rnbitpos;
2460 if (rnbitsize == rbitsize)
2461 return 0;
2462 }
2463
2464 if (BYTES_BIG_ENDIAN)
2465 lbitpos = lnbitsize - lbitsize - lbitpos;
2466
2467 /* Make the mask to be used against the extracted field. */
2468 mask = build_int_2 (~0, ~0);
2469 TREE_TYPE (mask) = unsigned_type;
2470 force_fit_type (mask, 0);
2471 mask = convert (unsigned_type, mask);
2472 mask = const_binop (LSHIFT_EXPR, mask, size_int (lnbitsize - lbitsize), 0);
2473 mask = const_binop (RSHIFT_EXPR, mask,
2474 size_int (lnbitsize - lbitsize - lbitpos), 0);
2475
2476 if (! const_p)
2477 /* If not comparing with constant, just rework the comparison
2478 and return. */
2479 return build (code, compare_type,
2480 build (BIT_AND_EXPR, unsigned_type,
2481 make_bit_field_ref (linner, unsigned_type,
2482 lnbitsize, lnbitpos, 1),
2483 mask),
2484 build (BIT_AND_EXPR, unsigned_type,
2485 make_bit_field_ref (rinner, unsigned_type,
2486 rnbitsize, rnbitpos, 1),
2487 mask));
2488
2489 /* Otherwise, we are handling the constant case. See if the constant is too
2490 big for the field. Warn and return a tree for 0 (false) if so. We do
2491 this not only for its own sake, but to avoid having to test for this
2492 error case below. If we didn't, we might generate wrong code.
2493
2494 For unsigned fields, the constant shifted right by the field length should
2495 be all zero. For signed fields, the high-order bits should agree with
2496 the sign bit. */
2497
2498 if (lunsignedp)
2499 {
2500 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2501 convert (unsigned_type, rhs),
2502 size_int (lbitsize), 0)))
2503 {
2504 warning ("comparison is always %s due to width of bitfield",
2505 code == NE_EXPR ? "one" : "zero");
2506 return convert (compare_type,
2507 (code == NE_EXPR
2508 ? integer_one_node : integer_zero_node));
2509 }
2510 }
2511 else
2512 {
2513 tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
2514 size_int (lbitsize - 1), 0);
2515 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2516 {
2517 warning ("comparison is always %s due to width of bitfield",
2518 code == NE_EXPR ? "one" : "zero");
2519 return convert (compare_type,
2520 (code == NE_EXPR
2521 ? integer_one_node : integer_zero_node));
2522 }
2523 }
2524
2525 /* Single-bit compares should always be against zero. */
2526 if (lbitsize == 1 && ! integer_zerop (rhs))
2527 {
2528 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2529 rhs = convert (type, integer_zero_node);
2530 }
2531
2532 /* Make a new bitfield reference, shift the constant over the
2533 appropriate number of bits and mask it with the computed mask
2534 (in case this was a signed field). If we changed it, make a new one. */
2535 lhs = make_bit_field_ref (linner, unsigned_type, lnbitsize, lnbitpos, 1);
2536 if (lvolatilep)
2537 {
2538 TREE_SIDE_EFFECTS (lhs) = 1;
2539 TREE_THIS_VOLATILE (lhs) = 1;
2540 }
2541
2542 rhs = fold (const_binop (BIT_AND_EXPR,
2543 const_binop (LSHIFT_EXPR,
2544 convert (unsigned_type, rhs),
2545 size_int (lbitpos), 0),
2546 mask, 0));
2547
2548 return build (code, compare_type,
2549 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
2550 rhs);
2551 }
2552 \f
2553 /* Subroutine for fold_truthop: decode a field reference.
2554
2555 If EXP is a comparison reference, we return the innermost reference.
2556
2557 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2558 set to the starting bit number.
2559
2560 If the innermost field can be completely contained in a mode-sized
2561 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
2562
2563 *PVOLATILEP is set to 1 if any expression encountered is volatile;
2564 otherwise it is not changed.
2565
2566 *PUNSIGNEDP is set to the signedness of the field.
2567
2568 *PMASK is set to the mask used. This is either contained in a
2569 BIT_AND_EXPR or derived from the width of the field.
2570
2571 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
2572
2573 Return 0 if this is not a component reference or is one that we can't
2574 do anything with. */
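
/* For example, for EXP = (x.b & 5), where B is an 8-bit unsigned field,
   we return X (the innermost reference) with *PBITSIZE = 8,
   *PAND_MASK = 5, and *PMASK = 5, the field mask 0xff ANDed with the
   explicit mask.  */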
2575
2576 static tree
2577 decode_field_reference (exp, pbitsize, pbitpos, pmode, punsignedp,
2578 pvolatilep, pmask, pand_mask)
2579 tree exp;
2580 int *pbitsize, *pbitpos;
2581 enum machine_mode *pmode;
2582 int *punsignedp, *pvolatilep;
2583 tree *pmask;
2584 tree *pand_mask;
2585 {
2586 tree and_mask = 0;
2587 tree mask, inner, offset;
2588 tree unsigned_type;
2589 int precision;
2590 int alignment;
2591
2592 /* All the optimizations using this function assume integer fields.
2593 There are problems with FP fields since the type_for_size call
2594 below can fail for, e.g., XFmode. */
2595 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
2596 return 0;
2597
2598 STRIP_NOPS (exp);
2599
2600 if (TREE_CODE (exp) == BIT_AND_EXPR)
2601 {
2602 and_mask = TREE_OPERAND (exp, 1);
2603 exp = TREE_OPERAND (exp, 0);
2604 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
2605 if (TREE_CODE (and_mask) != INTEGER_CST)
2606 return 0;
2607 }
2608
2609
2610 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
2611 punsignedp, pvolatilep, &alignment);
2612 if ((inner == exp && and_mask == 0)
2613 || *pbitsize < 0 || offset != 0)
2614 return 0;
2615
2616 /* Compute the mask to access the bitfield. */
2617 unsigned_type = type_for_size (*pbitsize, 1);
2618 precision = TYPE_PRECISION (unsigned_type);
2619
2620 mask = build_int_2 (~0, ~0);
2621 TREE_TYPE (mask) = unsigned_type;
2622 force_fit_type (mask, 0);
2623 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2624 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2625
2626 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
2627 if (and_mask != 0)
2628 mask = fold (build (BIT_AND_EXPR, unsigned_type,
2629 convert (unsigned_type, and_mask), mask));
2630
2631 *pmask = mask;
2632 *pand_mask = and_mask;
2633 return inner;
2634 }
2635
2636 /* Return non-zero if MASK represents a mask of SIZE ones in the low-order
2637 bit positions. */
2638
2639 static int
2640 all_ones_mask_p (mask, size)
2641 tree mask;
2642 int size;
2643 {
2644 tree type = TREE_TYPE (mask);
2645 int precision = TYPE_PRECISION (type);
2646 tree tmask;
2647
2648 tmask = build_int_2 (~0, ~0);
2649 TREE_TYPE (tmask) = signed_type (type);
2650 force_fit_type (tmask, 0);
2651 return
2652 tree_int_cst_equal (mask,
2653 const_binop (RSHIFT_EXPR,
2654 const_binop (LSHIFT_EXPR, tmask,
2655 size_int (precision - size),
2656 0),
2657 size_int (precision - size), 0));
2658 }
2659
2660 /* Subroutine for fold_truthop: determine if an operand is simple enough
2661 to be evaluated unconditionally. */
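
/* For example, an integer constant or a local non-volatile, non-static
   variable is simple; an indirection such as *p is not, since
   evaluating it unconditionally might fault, and a global is not, for
   the reasons given below.  */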
2662
2663 static int
2664 simple_operand_p (exp)
2665 tree exp;
2666 {
2667 /* Strip any conversions that don't change the machine mode. */
2668 while ((TREE_CODE (exp) == NOP_EXPR
2669 || TREE_CODE (exp) == CONVERT_EXPR)
2670 && (TYPE_MODE (TREE_TYPE (exp))
2671 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
2672 exp = TREE_OPERAND (exp, 0);
2673
2674 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
2675 || (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
2676 && ! TREE_ADDRESSABLE (exp)
2677 && ! TREE_THIS_VOLATILE (exp)
2678 && ! DECL_NONLOCAL (exp)
2679 /* Don't regard global variables as simple. They may be
2680 allocated in ways unknown to the compiler (shared memory,
2681 #pragma weak, etc). */
2682 && ! TREE_PUBLIC (exp)
2683 && ! DECL_EXTERNAL (exp)
2684 /* Loading a static variable is unduly expensive, but global
2685 registers aren't expensive. */
2686 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
2687 }
2688 \f
2689 /* The following functions are subroutines to fold_range_test and allow it to
2690 try to change a logical combination of comparisons into a range test.
2691
2692 For example, both
2693 X == 2 || X == 3 || X == 4 || X == 5
2694 and
2695 X >= 2 && X <= 5
2696 are converted to
2697 (unsigned) (X - 2) <= 3
2698
2699 We describe each set of comparisons as being either inside or outside
2700 a range, using a variable named like IN_P, and then describe the
2701 range with a lower and upper bound. If one of the bounds is omitted,
2702 it represents either the highest or lowest value of the type.
2703
2704 In the comments below, we represent a range by two numbers in brackets
2705 preceded by a "+" to designate being inside that range, or a "-" to
2706 designate being outside that range, so the condition can be inverted by
2707 flipping the prefix. An omitted bound is represented by a "-". For
2708 example, "- [-, 10]" means being outside the range starting at the lowest
2709 possible value and ending at 10, in other words, being greater than 10.
2710 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
2711 always false.
2712
2713 We set up things so that the missing bounds are handled in a consistent
2714 manner so neither a missing bound nor "true" and "false" need to be
2715 handled using a special case. */
2716
2717 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
2718 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
2719 and UPPER1_P are nonzero if the respective argument is an upper bound
2720 and zero for a lower. TYPE, if nonzero, is the type of the result; it
2721 must be specified for a comparison. ARG1 will be converted to ARG0's
2722 type if both are specified. */
2723
2724 static tree
2725 range_binop (code, type, arg0, upper0_p, arg1, upper1_p)
2726 enum tree_code code;
2727 tree type;
2728 tree arg0, arg1;
2729 int upper0_p, upper1_p;
2730 {
2731 tree tem;
2732 int result;
2733 int sgn0, sgn1;
2734
2735 /* If neither arg represents infinity, do the normal operation.
2736 Else, if not a comparison, return infinity. Else handle the special
2737 comparison rules. Note that most of the cases below won't occur, but
2738 are handled for consistency. */
2739
2740 if (arg0 != 0 && arg1 != 0)
2741 {
2742 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
2743 arg0, convert (TREE_TYPE (arg0), arg1)));
2744 STRIP_NOPS (tem);
2745 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
2746 }
2747
2748 if (TREE_CODE_CLASS (code) != '<')
2749 return 0;
2750
2751 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
2752 for neither. Then compute our result treating them as never equal
2753 and comparing bounds to non-bounds as above. */
2754 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
2755 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
2756 switch (code)
2757 {
2758 case EQ_EXPR: case NE_EXPR:
2759 result = (code == NE_EXPR);
2760 break;
2761 case LT_EXPR: case LE_EXPR:
2762 result = sgn0 < sgn1;
2763 break;
2764 case GT_EXPR: case GE_EXPR:
2765 result = sgn0 > sgn1;
2766 break;
2767 default:
2768 abort ();
2769 }
2770
2771 return convert (type, result ? integer_one_node : integer_zero_node);
2772 }
2773 \f
2774 /* Given EXP, a logical expression, set the range it is testing into
2775 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
2776 actually being tested. *PLOW and *PHIGH will be made the same type
2777 as the returned expression. If EXP is not a comparison, we will most
2778 likely not be returning a useful value and range. */
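
/* For example, for EXP = (x + 3 < 7), with X a signed int, the LT_EXPR
   case records the range "- [7, -]" for X + 3, and the PLUS_EXPR case
   then adjusts the bounds, so we return X with *PIN_P = 0, *PLOW = 4
   and *PHIGH = 0 (missing), i.e. X < 4.  */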
2779
2780 static tree
2781 make_range (exp, pin_p, plow, phigh)
2782 tree exp;
2783 int *pin_p;
2784 tree *plow, *phigh;
2785 {
2786 enum tree_code code;
2787 tree arg0, arg1, type = NULL_TREE;
2788 int in_p, n_in_p;
2789 tree low, high, n_low, n_high;
2790
2791 /* Start with simply saying "EXP != 0" and then look at the code of EXP
2792 and see if we can refine the range. Some of the cases below may not
2793 happen, but it doesn't seem worth worrying about this. We "continue"
2794 the outer loop when we've changed something; otherwise we "break"
2795 the switch, which will "break" the while. */
2796
2797 in_p = 0, low = high = convert (TREE_TYPE (exp), integer_zero_node);
2798
2799 while (1)
2800 {
2801 code = TREE_CODE (exp);
2802 arg0 = TREE_OPERAND (exp, 0), arg1 = TREE_OPERAND (exp, 1);
2803 if (TREE_CODE_CLASS (code) == '<' || TREE_CODE_CLASS (code) == '1'
2804 || TREE_CODE_CLASS (code) == '2')
2805 type = TREE_TYPE (arg0);
2806
2807 switch (code)
2808 {
2809 case TRUTH_NOT_EXPR:
2810 in_p = ! in_p, exp = arg0;
2811 continue;
2812
2813 case EQ_EXPR: case NE_EXPR:
2814 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
2815 /* We can only do something if the range is testing for zero
2816 and if the second operand is an integer constant. Note that
2817 saying something is "in" the range we make is done by
2818 complementing IN_P, since it is set up initially for the case of
2819 being not equal to zero; "out" is leaving it alone. */
2820 if (low == 0 || high == 0
2821 || ! integer_zerop (low) || ! integer_zerop (high)
2822 || TREE_CODE (arg1) != INTEGER_CST)
2823 break;
2824
2825 switch (code)
2826 {
2827 case NE_EXPR: /* - [c, c] */
2828 low = high = arg1;
2829 break;
2830 case EQ_EXPR: /* + [c, c] */
2831 in_p = ! in_p, low = high = arg1;
2832 break;
2833 case GT_EXPR: /* - [-, c] */
2834 low = 0, high = arg1;
2835 break;
2836 case GE_EXPR: /* + [c, -] */
2837 in_p = ! in_p, low = arg1, high = 0;
2838 break;
2839 case LT_EXPR: /* - [c, -] */
2840 low = arg1, high = 0;
2841 break;
2842 case LE_EXPR: /* + [-, c] */
2843 in_p = ! in_p, low = 0, high = arg1;
2844 break;
2845 default:
2846 abort ();
2847 }
2848
2849 exp = arg0;
2850
2851 /* If this is an unsigned comparison, we also know that EXP is
2852 greater than or equal to zero. We base the range tests we make
2853 on that fact, so we record it here so we can parse existing
2854 range tests. */
2855 if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
2856 {
2857 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
2858 1, convert (type, integer_zero_node),
2859 NULL_TREE))
2860 break;
2861
2862 in_p = n_in_p, low = n_low, high = n_high;
2863
2864 /* If the high bound is missing, reverse the range so it
2865 goes from zero to the low bound minus 1. */
2866 if (high == 0)
2867 {
2868 in_p = ! in_p;
2869 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
2870 integer_one_node, 0);
2871 low = convert (type, integer_zero_node);
2872 }
2873 }
2874 continue;
2875
2876 case NEGATE_EXPR:
2877 /* (-x) IN [a,b] -> x in [-b, -a] */
2878 n_low = range_binop (MINUS_EXPR, type,
2879 convert (type, integer_zero_node), 0, high, 1);
2880 n_high = range_binop (MINUS_EXPR, type,
2881 convert (type, integer_zero_node), 0, low, 0);
2882 low = n_low, high = n_high;
2883 exp = arg0;
2884 continue;
2885
2886 case BIT_NOT_EXPR:
2887 /* ~ X -> -X - 1 */
2888 exp = build (MINUS_EXPR, type, build1 (NEGATE_EXPR, type, arg0),
2889 convert (type, integer_one_node));
2890 continue;
2891
2892 case PLUS_EXPR: case MINUS_EXPR:
2893 if (TREE_CODE (arg1) != INTEGER_CST)
2894 break;
2895
2896 /* If EXP is signed, any overflow in the computation is undefined,
2897 so we don't worry about it so long as our computations on
2898 the bounds don't overflow. For unsigned, overflow is defined
2899 and this is exactly the right thing. */
2900 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
2901 type, low, 0, arg1, 0);
2902 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
2903 type, high, 1, arg1, 0);
2904 if ((n_low != 0 && TREE_OVERFLOW (n_low))
2905 || (n_high != 0 && TREE_OVERFLOW (n_high)))
2906 break;
2907
2908 /* Check for an unsigned range which has wrapped around the maximum
2909 value thus making n_high < n_low, and normalize it. */
2910 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
2911 {
2912 low = range_binop (PLUS_EXPR, type, n_high, 0,
2913 integer_one_node, 0);
2914 high = range_binop (MINUS_EXPR, type, n_low, 0,
2915 integer_one_node, 0);
2916 in_p = ! in_p;
2917 }
2918 else
2919 low = n_low, high = n_high;
2920
2921 exp = arg0;
2922 continue;
2923
2924 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
2925 if (! INTEGRAL_TYPE_P (type)
2926 || (low != 0 && ! int_fits_type_p (low, type))
2927 || (high != 0 && ! int_fits_type_p (high, type)))
2928 break;
2929
2930 n_low = low, n_high = high;
2931
2932 if (n_low != 0)
2933 n_low = convert (type, n_low);
2934
2935 if (n_high != 0)
2936 n_high = convert (type, n_high);
2937
2938 /* If we're converting from an unsigned to a signed type,
2939 we will be doing the comparison as unsigned. The tests above
2940 have already verified that LOW and HIGH are both positive.
2941
2942 So we have to make sure that the original unsigned value will
2943 be interpreted as positive. */
2944 if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
2945 {
2946 tree equiv_type = type_for_mode (TYPE_MODE (type), 1);
2947 tree high_positive;
2948
2949 /* A range without an upper bound is, naturally, unbounded.
2950 Since convert would have cropped a very large value, use
2951 the max value for the destination type. */
2952
2953 high_positive = TYPE_MAX_VALUE (equiv_type);
2954 if (!high_positive)
2955 {
2956 high_positive = TYPE_MAX_VALUE (type);
2957 if (!high_positive)
2958 abort();
2959 }
2960 high_positive = fold (build (RSHIFT_EXPR, type,
2961 convert (type, high_positive),
2962 convert (type, integer_one_node)));
2963
2964 /* If the low bound is specified, "and" the range with the
2965 range for which the original unsigned value will be
2966 positive. */
2967 if (low != 0)
2968 {
2969 if (! merge_ranges (&n_in_p, &n_low, &n_high,
2970 1, n_low, n_high,
2971 1, convert (type, integer_zero_node),
2972 high_positive))
2973 break;
2974
2975 in_p = (n_in_p == in_p);
2976 }
2977 else
2978 {
2979 /* Otherwise, "or" the range with the range of the input
2980 that will be interpreted as negative. */
2981 if (! merge_ranges (&n_in_p, &n_low, &n_high,
2982 0, n_low, n_high,
2983 1, convert (type, integer_zero_node),
2984 high_positive))
2985 break;
2986
2987 in_p = (in_p != n_in_p);
2988 }
2989 }
2990
2991 exp = arg0;
2992 low = n_low, high = n_high;
2993 continue;
2994
2995 default:
2996 break;
2997 }
2998
2999 break;
3000 }
3001
3002 /* If EXP is a constant, we can evaluate whether this is true or false. */
3003 if (TREE_CODE (exp) == INTEGER_CST)
3004 {
3005 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3006 exp, 0, low, 0))
3007 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3008 exp, 1, high, 1)));
3009 low = high = 0;
3010 exp = 0;
3011 }
3012
3013 *pin_p = in_p, *plow = low, *phigh = high;
3014 return exp;
3015 }
3016 \f
3017 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3018 type, TYPE, return an expression to test if EXP is in (or out of, depending
3019 on IN_P) the range. */
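
/* For example, a test that X (a signed int) is in [2, 5] becomes
   (unsigned) (x - 2) <= 3: the MINUS_EXPR case below rewrites the range
   as [0, 3], and the integer_zerop case then switches to an unsigned
   comparison against the upper bound alone.  */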
3020
3021 static tree
3022 build_range_check (type, exp, in_p, low, high)
3023 tree type;
3024 tree exp;
3025 int in_p;
3026 tree low, high;
3027 {
3028 tree etype = TREE_TYPE (exp);
3029 tree utype, value;
3030
3031 if (! in_p
3032 && (0 != (value = build_range_check (type, exp, 1, low, high))))
3033 return invert_truthvalue (value);
3034
3035 else if (low == 0 && high == 0)
3036 return convert (type, integer_one_node);
3037
3038 else if (low == 0)
3039 return fold (build (LE_EXPR, type, exp, high));
3040
3041 else if (high == 0)
3042 return fold (build (GE_EXPR, type, exp, low));
3043
3044 else if (operand_equal_p (low, high, 0))
3045 return fold (build (EQ_EXPR, type, exp, low));
3046
3047 else if (TREE_UNSIGNED (etype) && integer_zerop (low))
3048 return build_range_check (type, exp, 1, 0, high);
3049
3050 else if (integer_zerop (low))
3051 {
3052 utype = unsigned_type (etype);
3053 return build_range_check (type, convert (utype, exp), 1, 0,
3054 convert (utype, high));
3055 }
3056
3057 else if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3058 && ! TREE_OVERFLOW (value))
3059 return build_range_check (type,
3060 fold (build (MINUS_EXPR, etype, exp, low)),
3061 1, convert (etype, integer_zero_node), value);
3062 else
3063 return 0;
3064 }
3065 \f
3066 /* Given two ranges, see if we can merge them into one. Return 1 if we
3067 can, 0 if we can't. Set the output range into the specified parameters. */
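
/* For example, merging "+ [2, 5]" with "+ [4, 9]" (both included, as
   for an AND) gives "+ [4, 5]": the ranges overlap and neither is a
   subset of the other, so the result runs from the start of the second
   range to the end of the first.  */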
3068
3069 static int
3070 merge_ranges (pin_p, plow, phigh, in0_p, low0, high0, in1_p, low1, high1)
3071 int *pin_p;
3072 tree *plow, *phigh;
3073 int in0_p, in1_p;
3074 tree low0, high0, low1, high1;
3075 {
3076 int no_overlap;
3077 int subset;
3078 int temp;
3079 tree tem;
3080 int in_p;
3081 tree low, high;
3082 int lowequal = ((low0 == 0 && low1 == 0)
3083 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3084 low0, 0, low1, 0)));
3085 int highequal = ((high0 == 0 && high1 == 0)
3086 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3087 high0, 1, high1, 1)));
3088
3089 /* Make range 0 be the range that starts first, or ends last if they
3090 start at the same value. Swap them if it isn't. */
3091 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3092 low0, 0, low1, 0))
3093 || (lowequal
3094 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3095 high1, 1, high0, 1))))
3096 {
3097 temp = in0_p, in0_p = in1_p, in1_p = temp;
3098 tem = low0, low0 = low1, low1 = tem;
3099 tem = high0, high0 = high1, high1 = tem;
3100 }
3101
3102 /* Now flag two cases, whether the ranges are disjoint or whether the
3103 second range is totally subsumed in the first. Note that the tests
3104 below are simplified by the ones above. */
3105 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3106 high0, 1, low1, 0));
3107 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3108 high1, 1, high0, 1));
3109
3110 /* We now have four cases, depending on whether we are including or
3111 excluding the two ranges. */
3112 if (in0_p && in1_p)
3113 {
3114 /* If they don't overlap, the result is false. If the second range
3115 is a subset it is the result. Otherwise, the range is from the start
3116 of the second to the end of the first. */
3117 if (no_overlap)
3118 in_p = 0, low = high = 0;
3119 else if (subset)
3120 in_p = 1, low = low1, high = high1;
3121 else
3122 in_p = 1, low = low1, high = high0;
3123 }
3124
3125 else if (in0_p && ! in1_p)
3126 {
3127 /* If they don't overlap, the result is the first range. If they are
3128 equal, the result is false. If the second range is a subset of the
3129 first, and the ranges begin at the same place, we go from just after
3130 the end of the first range to the end of the second. If the second
3131 range is not a subset of the first, or if it is a subset and both
3132 ranges end at the same place, the range starts at the start of the
3133 first range and ends just before the second range.
3134 Otherwise, we can't describe this as a single range. */
3135 if (no_overlap)
3136 in_p = 1, low = low0, high = high0;
3137 else if (lowequal && highequal)
3138 in_p = 0, low = high = 0;
3139 else if (subset && lowequal)
3140 {
3141 in_p = 1, high = high0;
3142 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3143 integer_one_node, 0);
3144 }
3145 else if (! subset || highequal)
3146 {
3147 in_p = 1, low = low0;
3148 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3149 integer_one_node, 0);
3150 }
3151 else
3152 return 0;
3153 }
3154
3155 else if (! in0_p && in1_p)
3156 {
3157 /* If they don't overlap, the result is the second range. If the second
3158 is a subset of the first, the result is false. Otherwise,
3159 the range starts just after the first range and ends at the
3160 end of the second. */
3161 if (no_overlap)
3162 in_p = 1, low = low1, high = high1;
3163 else if (subset)
3164 in_p = 0, low = high = 0;
3165 else
3166 {
3167 in_p = 1, high = high1;
3168 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3169 integer_one_node, 0);
3170 }
3171 }
3172
3173 else
3174 {
3175 /* The case where we are excluding both ranges. Here the complex case
3176 is if they don't overlap. In that case, the only time we have a
3177 range is if they are adjacent. If the second is a subset of the
3178 first, the result is the first. Otherwise, the range to exclude
3179 starts at the beginning of the first range and ends at the end of the
3180 second. */
3181 if (no_overlap)
3182 {
3183 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3184 range_binop (PLUS_EXPR, NULL_TREE,
3185 high0, 1,
3186 integer_one_node, 1),
3187 1, low1, 0)))
3188 in_p = 0, low = low0, high = high1;
3189 else
3190 return 0;
3191 }
3192 else if (subset)
3193 in_p = 0, low = low0, high = high0;
3194 else
3195 in_p = 0, low = low0, high = high1;
3196 }
3197
3198 *pin_p = in_p, *plow = low, *phigh = high;
3199 return 1;
3200 }
3201 \f
3202 /* EXP is some logical combination of boolean tests. See if we can
3203 merge it into some range test. Return the new tree if so. */
3204
3205 static tree
3206 fold_range_test (exp)
3207 tree exp;
3208 {
3209 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3210 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3211 int in0_p, in1_p, in_p;
3212 tree low0, low1, low, high0, high1, high;
3213 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3214 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3215 tree tem;
3216
3217 /* If this is an OR operation, invert both sides; we will invert
3218 again at the end. */
3219 if (or_op)
3220 in0_p = ! in0_p, in1_p = ! in1_p;
3221
3222 /* If both expressions are the same, if we can merge the ranges, and we
3223 can build the range test, return it, possibly inverted. If one of the
3224 ranges is always true or always false, consider it to be the same
3225 expression as the other. */
3226 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3227 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3228 in1_p, low1, high1)
3229 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3230 lhs != 0 ? lhs
3231 : rhs != 0 ? rhs : integer_zero_node,
3232 in_p, low, high))))
3233 return or_op ? invert_truthvalue (tem) : tem;
3234
3235 /* On machines where branches are expensive, if this is a
3236 short-circuited branch and the underlying object on both sides
3237 is the same, make a non-short-circuit operation. */
3238 else if (BRANCH_COST >= 2
3239 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3240 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3241 && operand_equal_p (lhs, rhs, 0))
3242 {
3243 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3244 unless we are at top level, in which case we can't do this. */
3245 if (simple_operand_p (lhs))
3246 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3247 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3248 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3249 TREE_OPERAND (exp, 1));
3250
3251 else if (current_function_decl != 0)
3252 {
3253 tree common = save_expr (lhs);
3254
3255 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3256 or_op ? ! in0_p : in0_p,
3257 low0, high0))
3258 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3259 or_op ? ! in1_p : in1_p,
3260 low1, high1))))
3261 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3262 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3263 TREE_TYPE (exp), lhs, rhs);
3264 }
3265 }
3266
3267
3268 return 0;
3269 }
3270 \f
3271 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3272 bit value. Arrange things so the extra bits will be set to zero if and
3273 only if C is sign-extended to its full width. If MASK is nonzero,
3274 it is an INTEGER_CST that should be AND'ed with the extra bits. */
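
/* For example, with P = 4 in a 32-bit mode, the sign-extended value
   C = -6 (bit pattern ...11111010) becomes 10 (binary 1010 with the
   upper 28 bits clear), the pattern the value has when extracted as an
   unsigned 4-bit field.  */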
3275
3276 static tree
3277 unextend (c, p, unsignedp, mask)
3278 tree c;
3279 int p;
3280 int unsignedp;
3281 tree mask;
3282 {
3283 tree type = TREE_TYPE (c);
3284 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3285 tree temp;
3286
3287 if (p == modesize || unsignedp)
3288 return c;
3289
3290 /* We work by getting just the sign bit into the low-order bit, then
3291 into the high-order bit, then sign-extend. We then XOR that value
3292 with C. */
3293 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3294 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3295
3296 /* We must use a signed type in order to get an arithmetic right shift.
3297 However, we must also avoid introducing accidental overflows, so that
3298 a subsequent call to integer_zerop will work. Hence we must
3299 do the type conversion here. At this point, the constant is either
3300 zero or one, and the conversion to a signed type can never overflow.
3301 We could get an overflow if this conversion is done anywhere else. */
3302 if (TREE_UNSIGNED (type))
3303 temp = convert (signed_type (type), temp);
3304
3305 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3306 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3307 if (mask != 0)
3308 temp = const_binop (BIT_AND_EXPR, temp, convert (TREE_TYPE (c), mask), 0);
3309 /* If necessary, convert the type back to match the type of C. */
3310 if (TREE_UNSIGNED (type))
3311 temp = convert (type, temp);
3312
3313 return convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
3314 }
3315 \f
3316 /* Find ways of folding logical expressions of LHS and RHS:
3317 Try to merge two comparisons to the same innermost item.
3318 Look for range tests like "ch >= '0' && ch <= '9'".
3319 Look for combinations of simple terms on machines with expensive branches
3320 and evaluate the RHS unconditionally.
3321
3322 For example, if we have p->a == 2 && p->b == 4 and we can make an
3323 object large enough to span both A and B, we can do this with a comparison
3324 against the object ANDed with a mask.
3325
3326 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3327 operations to do this with one comparison.
3328
3329 We check for both normal comparisons and the BIT_AND_EXPRs made by this
3330 function and the one above.
3331
3332 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3333 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3334
3335 TRUTH_TYPE is the type of the logical operation and LHS and RHS are its
3336 two operands.
3337
3338 We return the simplified tree or 0 if no optimization is possible. */
3339
3340 static tree
3341 fold_truthop (code, truth_type, lhs, rhs)
3342 enum tree_code code;
3343 tree truth_type, lhs, rhs;
3344 {
3345 /* If this is the "or" of two comparisons, we can do something if
3346 the comparisons are NE_EXPR. If this is the "and", we can do something
3347 if the comparisons are EQ_EXPR. I.e.,
3348 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3349
3350 WANTED_CODE is this operation code. For single bit fields, we can
3351 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3352 comparison for one-bit fields. */
3353
3354 enum tree_code wanted_code;
3355 enum tree_code lcode, rcode;
3356 tree ll_arg, lr_arg, rl_arg, rr_arg;
3357 tree ll_inner, lr_inner, rl_inner, rr_inner;
3358 int ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3359 int rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3360 int xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3361 int lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3362 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3363 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3364 enum machine_mode lnmode, rnmode;
3365 tree ll_mask, lr_mask, rl_mask, rr_mask;
3366 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3367 tree l_const, r_const;
3368 tree type, result;
3369 int first_bit, end_bit;
3370 int volatilep;
3371
3372 /* Start by getting the comparison codes. Fail if anything is volatile.
3373 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3374 it were surrounded with a NE_EXPR. */
3375
3376 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3377 return 0;
3378
3379 lcode = TREE_CODE (lhs);
3380 rcode = TREE_CODE (rhs);
3381
3382 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3383 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3384
3385 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3386 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3387
3388 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3389 return 0;
3390
3391 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3392 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3393
3394 ll_arg = TREE_OPERAND (lhs, 0);
3395 lr_arg = TREE_OPERAND (lhs, 1);
3396 rl_arg = TREE_OPERAND (rhs, 0);
3397 rr_arg = TREE_OPERAND (rhs, 1);
3398
3399 /* If the RHS can be evaluated unconditionally and its operands are
3400 simple, it wins to evaluate the RHS unconditionally on machines
3401 with expensive branches. In this case, this isn't a comparison
3402 that can be merged. */
3403
3404 /* @@ I'm not sure it wins on the m88110 to do this if the comparisons
3405 are with zero (tmw). */
3406
3407 if (BRANCH_COST >= 2
3408 && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
3409 && simple_operand_p (rl_arg)
3410 && simple_operand_p (rr_arg))
3411 return build (code, truth_type, lhs, rhs);
3412
3413 /* See if the comparisons can be merged. Then get all the parameters for
3414 each side. */
3415
3416 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
3417 || (rcode != EQ_EXPR && rcode != NE_EXPR))
3418 return 0;
3419
3420 volatilep = 0;
3421 ll_inner = decode_field_reference (ll_arg,
3422 &ll_bitsize, &ll_bitpos, &ll_mode,
3423 &ll_unsignedp, &volatilep, &ll_mask,
3424 &ll_and_mask);
3425 lr_inner = decode_field_reference (lr_arg,
3426 &lr_bitsize, &lr_bitpos, &lr_mode,
3427 &lr_unsignedp, &volatilep, &lr_mask,
3428 &lr_and_mask);
3429 rl_inner = decode_field_reference (rl_arg,
3430 &rl_bitsize, &rl_bitpos, &rl_mode,
3431 &rl_unsignedp, &volatilep, &rl_mask,
3432 &rl_and_mask);
3433 rr_inner = decode_field_reference (rr_arg,
3434 &rr_bitsize, &rr_bitpos, &rr_mode,
3435 &rr_unsignedp, &volatilep, &rr_mask,
3436 &rr_and_mask);
3437
3438 /* The inner operation on the lhs of each
3439 comparison must be the same if we are to be able to do anything.
3440 Then see if we have constants. If not, the same must be true for
3441 the rhs's. */
3442 if (volatilep || ll_inner == 0 || rl_inner == 0
3443 || ! operand_equal_p (ll_inner, rl_inner, 0))
3444 return 0;
3445
3446 if (TREE_CODE (lr_arg) == INTEGER_CST
3447 && TREE_CODE (rr_arg) == INTEGER_CST)
3448 l_const = lr_arg, r_const = rr_arg;
3449 else if (lr_inner == 0 || rr_inner == 0
3450 || ! operand_equal_p (lr_inner, rr_inner, 0))
3451 return 0;
3452 else
3453 l_const = r_const = 0;
3454
3455 /* If either comparison code is not correct for our logical operation,
3456 fail. However, we can convert a one-bit comparison against zero into
3457 the opposite comparison against that bit being set in the field. */
3458
3459 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
3460 if (lcode != wanted_code)
3461 {
3462 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
3463 {
3464 if (ll_unsignedp || tree_log2 (ll_mask) + 1 < ll_bitsize)
3465 l_const = ll_mask;
3466 else
3467 /* Since ll_arg is a single-bit mask, we can sign extend
3468 it appropriately with a NEGATE_EXPR.
3469 l_const is made a signed value here, but since lr_unsignedp is
3470 not used when l_const != NULL, we don't need to clear the latter. */
3471 l_const = fold (build1 (NEGATE_EXPR, TREE_TYPE (ll_arg),
3472 convert (TREE_TYPE (ll_arg), ll_mask)));
3473 }
3474 else
3475 return 0;
3476 }
3477
3478 if (rcode != wanted_code)
3479 {
3480 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
3481 {
3482 if (rl_unsignedp || tree_log2 (rl_mask) + 1 < rl_bitsize)
3483 r_const = rl_mask;
3484 else
3485 /* This is analogous to the code for l_const above. */
3486 r_const = fold (build1 (NEGATE_EXPR, TREE_TYPE (rl_arg),
3487 convert (TREE_TYPE (rl_arg), rl_mask)));
3488 }
3489 else
3490 return 0;
3491 }
3492
3493 /* See if we can find a mode that contains both fields being compared on
3494 the left. If we can't, fail. Otherwise, update all constants and masks
3495 to be relative to a field of that size. */
3496 first_bit = MIN (ll_bitpos, rl_bitpos);
3497 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
3498 lnmode = get_best_mode (end_bit - first_bit, first_bit,
3499 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
3500 volatilep);
3501 if (lnmode == VOIDmode)
3502 return 0;
3503
3504 lnbitsize = GET_MODE_BITSIZE (lnmode);
3505 lnbitpos = first_bit & ~ (lnbitsize - 1);
3506 type = type_for_size (lnbitsize, 1);
3507 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
3508
3509 if (BYTES_BIG_ENDIAN)
3510 {
3511 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
3512 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
3513 }
3514
3515 ll_mask = const_binop (LSHIFT_EXPR, convert (type, ll_mask),
3516 size_int (xll_bitpos), 0);
3517 rl_mask = const_binop (LSHIFT_EXPR, convert (type, rl_mask),
3518 size_int (xrl_bitpos), 0);
3519
3520 if (l_const)
3521 {
3522 l_const = convert (type, l_const);
3523 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
3524 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
3525 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
3526 fold (build1 (BIT_NOT_EXPR,
3527 type, ll_mask)),
3528 0)))
3529 {
3530 warning ("comparison is always %s",
3531 wanted_code == NE_EXPR ? "one" : "zero");
3532
3533 return convert (truth_type,
3534 wanted_code == NE_EXPR
3535 ? integer_one_node : integer_zero_node);
3536 }
3537 }
3538 if (r_const)
3539 {
3540 r_const = convert (type, r_const);
3541 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
3542 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
3543 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
3544 fold (build1 (BIT_NOT_EXPR,
3545 type, rl_mask)),
3546 0)))
3547 {
3548 warning ("comparison is always %s",
3549 wanted_code == NE_EXPR ? "one" : "zero");
3550
3551 return convert (truth_type,
3552 wanted_code == NE_EXPR
3553 ? integer_one_node : integer_zero_node);
3554 }
3555 }
3556
3557 /* If the right sides are not constant, do the same for them. Also,
3558 disallow this optimization if a size or signedness mismatch occurs
3559 between the left and right sides. */
3560 if (l_const == 0)
3561 {
3562 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
3563 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
3564 /* Make sure the two fields on the right
3565 correspond to the left without being swapped. */
3566 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
3567 return 0;
3568
3569 first_bit = MIN (lr_bitpos, rr_bitpos);
3570 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
3571 rnmode = get_best_mode (end_bit - first_bit, first_bit,
3572 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
3573 volatilep);
3574 if (rnmode == VOIDmode)
3575 return 0;
3576
3577 rnbitsize = GET_MODE_BITSIZE (rnmode);
3578 rnbitpos = first_bit & ~ (rnbitsize - 1);
3579 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
3580
3581 if (BYTES_BIG_ENDIAN)
3582 {
3583 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
3584 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
3585 }
3586
3587 lr_mask = const_binop (LSHIFT_EXPR, convert (type, lr_mask),
3588 size_int (xlr_bitpos), 0);
3589 rr_mask = const_binop (LSHIFT_EXPR, convert (type, rr_mask),
3590 size_int (xrr_bitpos), 0);
3591
3592       /* Make a mask that corresponds to both fields being compared.
3593 	 Do this for both items being compared.  If the masks agree,
3594 	 we can do the comparison by masking both operands and comparing
3595 	 the masked results.  */
3596 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3597 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
3598 if (operand_equal_p (ll_mask, lr_mask, 0) && lnbitsize == rnbitsize)
3599 {
3600 lhs = make_bit_field_ref (ll_inner, type, lnbitsize, lnbitpos,
3601 ll_unsignedp || rl_unsignedp);
3602 rhs = make_bit_field_ref (lr_inner, type, rnbitsize, rnbitpos,
3603 lr_unsignedp || rr_unsignedp);
3604 if (! all_ones_mask_p (ll_mask, lnbitsize))
3605 {
3606 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
3607 rhs = build (BIT_AND_EXPR, type, rhs, ll_mask);
3608 }
3609 return build (wanted_code, truth_type, lhs, rhs);
3610 }
3611
3612       /* One more transformation is possible: if both pairs of fields
3613 	 being compared are adjacent, we may be able to make a wider
3614 	 field containing them both.  */
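/* For instance, if hypothetical fields `a : 4' and `b : 4' occupy
   adjacent bits in both structures, `x.a == y.a && x.b == y.b' can
   become a single 8-bit field comparison.  */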
3615 if ((ll_bitsize + ll_bitpos == rl_bitpos
3616 && lr_bitsize + lr_bitpos == rr_bitpos)
3617 || (ll_bitpos == rl_bitpos + rl_bitsize
3618 && lr_bitpos == rr_bitpos + rr_bitsize))
3619 return build (wanted_code, truth_type,
3620 make_bit_field_ref (ll_inner, type,
3621 ll_bitsize + rl_bitsize,
3622 MIN (ll_bitpos, rl_bitpos),
3623 ll_unsignedp),
3624 make_bit_field_ref (lr_inner, type,
3625 lr_bitsize + rr_bitsize,
3626 MIN (lr_bitpos, rr_bitpos),
3627 lr_unsignedp));
3628
3629 return 0;
3630 }
3631
3632 /* Handle the case of comparisons with constants. If there is something in
3633 common between the masks, those bits of the constants must be the same.
3634 If not, the condition is always false. Test for this to avoid generating
3635 incorrect code below. */
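/* For instance, `(x & 3) == 3 && (x & 5) == 4' is always false: the
   common mask is 1, and the two constants disagree on that bit
   (3 has it set, 4 does not).  */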
3636 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
3637 if (! integer_zerop (result)
3638 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
3639 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
3640 {
3641 if (wanted_code == NE_EXPR)
3642 {
3643 warning ("`or' of unmatched not-equal tests is always 1");
3644 return convert (truth_type, integer_one_node);
3645 }
3646 else
3647 {
3648 warning ("`and' of mutually exclusive equal-tests is always zero");
3649 return convert (truth_type, integer_zero_node);
3650 }
3651 }
3652
3653 /* Construct the expression we will return. First get the component
3654 reference we will make. Unless the mask is all ones the width of
3655 that field, perform the mask operation. Then compare with the
3656 merged constant. */
3657 result = make_bit_field_ref (ll_inner, type, lnbitsize, lnbitpos,
3658 ll_unsignedp || rl_unsignedp);
3659
3660 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3661 if (! all_ones_mask_p (ll_mask, lnbitsize))
3662 result = build (BIT_AND_EXPR, type, result, ll_mask);
3663
3664 return build (wanted_code, truth_type, result,
3665 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
3666 }
3667 \f
3668 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
3669 S, a SAVE_EXPR, return the expression actually being evaluated. Note
3670 that we may sometimes modify the tree. */
3671
3672 static tree
3673 strip_compound_expr (t, s)
3674 tree t;
3675 tree s;
3676 {
3677 enum tree_code code = TREE_CODE (t);
3678
3679 /* See if this is the COMPOUND_EXPR we want to eliminate. */
3680 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
3681 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
3682 return TREE_OPERAND (t, 1);
3683
3684 /* See if this is a COND_EXPR or a simple arithmetic operator. We
3685 don't bother handling any other types. */
3686 else if (code == COND_EXPR)
3687 {
3688 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
3689 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
3690 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
3691 }
3692 else if (TREE_CODE_CLASS (code) == '1')
3693 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
3694 else if (TREE_CODE_CLASS (code) == '<'
3695 || TREE_CODE_CLASS (code) == '2')
3696 {
3697 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
3698 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
3699 }
3700
3701 return t;
3702 }
3703 \f
3704 /* Perform constant folding and related simplification of EXPR.
3705 The related simplifications include x*1 => x, x*0 => 0, etc.,
3706 and application of the associative law.
3707 NOP_EXPR conversions may be removed freely (as long as we
3708    are careful not to change the C type of the overall expression).
3709 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
3710 but we can constant-fold them if they have constant operands. */
3711
3712 tree
3713 fold (expr)
3714 tree expr;
3715 {
3716 register tree t = expr;
3717 tree t1 = NULL_TREE;
3718 tree tem;
3719 tree type = TREE_TYPE (expr);
3720 register tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3721 register enum tree_code code = TREE_CODE (t);
3722 register int kind;
3723 int invert;
3724
3725 /* WINS will be nonzero when the switch is done
3726 if all operands are constant. */
3727
3728 int wins = 1;
3729
3730 /* Don't try to process an RTL_EXPR since its operands aren't trees.
3731 Likewise for a SAVE_EXPR that's already been evaluated. */
3732   if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
3733 return t;
3734
3735 /* Return right away if already constant. */
3736 if (TREE_CONSTANT (t))
3737 {
3738 if (code == CONST_DECL)
3739 return DECL_INITIAL (t);
3740 return t;
3741 }
3742
3743 kind = TREE_CODE_CLASS (code);
3744 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
3745 {
3746 tree subop;
3747
3748 /* Special case for conversion ops that can have fixed point args. */
3749 arg0 = TREE_OPERAND (t, 0);
3750
3751 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
3752 if (arg0 != 0)
3753 STRIP_TYPE_NOPS (arg0);
3754
3755 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
3756 subop = TREE_REALPART (arg0);
3757 else
3758 subop = arg0;
3759
3760 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
3761 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3762 && TREE_CODE (subop) != REAL_CST
3763 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3764 )
3765 /* Note that TREE_CONSTANT isn't enough:
3766 static var addresses are constant but we can't
3767 do arithmetic on them. */
3768 wins = 0;
3769 }
3770 else if (kind == 'e' || kind == '<'
3771 || kind == '1' || kind == '2' || kind == 'r')
3772 {
3773 register int len = tree_code_length[(int) code];
3774 register int i;
3775 for (i = 0; i < len; i++)
3776 {
3777 tree op = TREE_OPERAND (t, i);
3778 tree subop;
3779
3780 if (op == 0)
3781 continue; /* Valid for CALL_EXPR, at least. */
3782
3783 if (kind == '<' || code == RSHIFT_EXPR)
3784 {
3785 /* Signedness matters here. Perhaps we can refine this
3786 later. */
3787 STRIP_TYPE_NOPS (op);
3788 }
3789 else
3790 {
3791 /* Strip any conversions that don't change the mode. */
3792 STRIP_NOPS (op);
3793 }
3794
3795 if (TREE_CODE (op) == COMPLEX_CST)
3796 subop = TREE_REALPART (op);
3797 else
3798 subop = op;
3799
3800 if (TREE_CODE (subop) != INTEGER_CST
3801 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3802 && TREE_CODE (subop) != REAL_CST
3803 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3804 )
3805 /* Note that TREE_CONSTANT isn't enough:
3806 static var addresses are constant but we can't
3807 do arithmetic on them. */
3808 wins = 0;
3809
3810 if (i == 0)
3811 arg0 = op;
3812 else if (i == 1)
3813 arg1 = op;
3814 }
3815 }
3816
3817 /* If this is a commutative operation, and ARG0 is a constant, move it
3818 to ARG1 to reduce the number of tests below. */
3819 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
3820 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
3821 || code == BIT_AND_EXPR)
3822 && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
3823 {
3824 tem = arg0; arg0 = arg1; arg1 = tem;
3825
3826 tem = TREE_OPERAND (t, 0); TREE_OPERAND (t, 0) = TREE_OPERAND (t, 1);
3827 TREE_OPERAND (t, 1) = tem;
3828 }
3829
3830 /* Now WINS is set as described above,
3831 ARG0 is the first operand of EXPR,
3832 and ARG1 is the second operand (if it has more than one operand).
3833
3834 First check for cases where an arithmetic operation is applied to a
3835 compound, conditional, or comparison operation. Push the arithmetic
3836 operation inside the compound or conditional to see if any folding
3837 can then be done. Convert comparison to conditional for this purpose.
3838      This also optimizes non-constant cases that used to be done in
3839 expand_expr.
3840
3841      Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
3842      where one operand is a truth value (a comparison counts as one) and
3843      the other is a truth value or a BIT_AND_EXPR with the constant 1.  In that case, the
3844 code below would make the expression more complex. Change it to a
3845 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
3846 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
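/* For instance, `(a < b) & (c < d)' becomes a TRUTH_AND_EXPR of the
   two comparisons, `(a < b) != (c < d)' becomes a TRUTH_XOR_EXPR,
   and `(a < b) == (c < d)' becomes the inversion of that
   TRUTH_XOR_EXPR.  */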
3847
3848 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
3849 || code == EQ_EXPR || code == NE_EXPR)
3850 && ((truth_value_p (TREE_CODE (arg0))
3851 && (truth_value_p (TREE_CODE (arg1))
3852 || (TREE_CODE (arg1) == BIT_AND_EXPR
3853 && integer_onep (TREE_OPERAND (arg1, 1)))))
3854 || (truth_value_p (TREE_CODE (arg1))
3855 && (truth_value_p (TREE_CODE (arg0))
3856 || (TREE_CODE (arg0) == BIT_AND_EXPR
3857 && integer_onep (TREE_OPERAND (arg0, 1)))))))
3858 {
3859 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
3860 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
3861 : TRUTH_XOR_EXPR,
3862 type, arg0, arg1));
3863
3864 if (code == EQ_EXPR)
3865 t = invert_truthvalue (t);
3866
3867 return t;
3868 }
3869
3870 if (TREE_CODE_CLASS (code) == '1')
3871 {
3872 if (TREE_CODE (arg0) == COMPOUND_EXPR)
3873 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
3874 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
3875 else if (TREE_CODE (arg0) == COND_EXPR)
3876 {
3877 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
3878 fold (build1 (code, type, TREE_OPERAND (arg0, 1))),
3879 fold (build1 (code, type, TREE_OPERAND (arg0, 2)))));
3880
3881 	  /* If this was a conversion, and all we did was to move it
3882 	     inside the COND_EXPR, bring it back out.  But leave it if
3883 it is a conversion from integer to integer and the
3884 result precision is no wider than a word since such a
3885 conversion is cheap and may be optimized away by combine,
3886 while it couldn't if it were outside the COND_EXPR. Then return
3887 so we don't get into an infinite recursion loop taking the
3888 conversion out and then back in. */
3889
3890 if ((code == NOP_EXPR || code == CONVERT_EXPR
3891 || code == NON_LVALUE_EXPR)
3892 && TREE_CODE (t) == COND_EXPR
3893 && TREE_CODE (TREE_OPERAND (t, 1)) == code
3894 && TREE_CODE (TREE_OPERAND (t, 2)) == code
3895 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
3896 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
3897 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
3898 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0)))
3899 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
3900 t = build1 (code, type,
3901 build (COND_EXPR,
3902 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0)),
3903 TREE_OPERAND (t, 0),
3904 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
3905 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
3906 return t;
3907 }
3908 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
3909 return fold (build (COND_EXPR, type, arg0,
3910 fold (build1 (code, type, integer_one_node)),
3911 fold (build1 (code, type, integer_zero_node))));
3912 }
3913 else if (TREE_CODE_CLASS (code) == '2'
3914 || TREE_CODE_CLASS (code) == '<')
3915 {
3916 if (TREE_CODE (arg1) == COMPOUND_EXPR)
3917 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
3918 fold (build (code, type,
3919 arg0, TREE_OPERAND (arg1, 1))));
3920 else if ((TREE_CODE (arg1) == COND_EXPR
3921 || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
3922 && TREE_CODE_CLASS (code) != '<'))
3923 && (! TREE_SIDE_EFFECTS (arg0) || current_function_decl != 0))
3924 {
3925 tree test, true_value, false_value;
3926
3927 if (TREE_CODE (arg1) == COND_EXPR)
3928 {
3929 test = TREE_OPERAND (arg1, 0);
3930 true_value = TREE_OPERAND (arg1, 1);
3931 false_value = TREE_OPERAND (arg1, 2);
3932 }
3933 else
3934 {
3935 tree testtype = TREE_TYPE (arg1);
3936 test = arg1;
3937 true_value = convert (testtype, integer_one_node);
3938 false_value = convert (testtype, integer_zero_node);
3939 }
3940
3941 /* If ARG0 is complex we want to make sure we only evaluate
3942 it once. Though this is only required if it is volatile, it
3943 might be more efficient even if it is not. However, if we
3944 succeed in folding one part to a constant, we do not need
3945 to make this SAVE_EXPR. Since we do this optimization
3946 primarily to see if we do end up with constant and this
3947 SAVE_EXPR interferes with later optimizations, suppressing
3948 it when we can is important. */
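/* For instance, folding `f () * (p ? 4 : 8)' in effect produces
   `tmp = f (), p ? tmp * 4 : tmp * 8', where tmp stands for the
   SAVE_EXPR; without it, f () would be evaluated twice.  */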
3949
3950 if (TREE_CODE (arg0) != SAVE_EXPR
3951 && ((TREE_CODE (arg0) != VAR_DECL
3952 && TREE_CODE (arg0) != PARM_DECL)
3953 || TREE_SIDE_EFFECTS (arg0)))
3954 {
3955 tree lhs = fold (build (code, type, arg0, true_value));
3956 tree rhs = fold (build (code, type, arg0, false_value));
3957
3958 if (TREE_CONSTANT (lhs) || TREE_CONSTANT (rhs))
3959 return fold (build (COND_EXPR, type, test, lhs, rhs));
3960
3961 if (current_function_decl != 0)
3962 arg0 = save_expr (arg0);
3963 }
3964
3965 test = fold (build (COND_EXPR, type, test,
3966 fold (build (code, type, arg0, true_value)),
3967 fold (build (code, type, arg0, false_value))));
3968 if (TREE_CODE (arg0) == SAVE_EXPR)
3969 return build (COMPOUND_EXPR, type,
3970 convert (void_type_node, arg0),
3971 strip_compound_expr (test, arg0));
3972 else
3973 return convert (type, test);
3974 }
3975
3976 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
3977 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
3978 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
3979 else if ((TREE_CODE (arg0) == COND_EXPR
3980 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
3981 && TREE_CODE_CLASS (code) != '<'))
3982 && (! TREE_SIDE_EFFECTS (arg1) || current_function_decl != 0))
3983 {
3984 tree test, true_value, false_value;
3985
3986 if (TREE_CODE (arg0) == COND_EXPR)
3987 {
3988 test = TREE_OPERAND (arg0, 0);
3989 true_value = TREE_OPERAND (arg0, 1);
3990 false_value = TREE_OPERAND (arg0, 2);
3991 }
3992 else
3993 {
3994 tree testtype = TREE_TYPE (arg0);
3995 test = arg0;
3996 true_value = convert (testtype, integer_one_node);
3997 false_value = convert (testtype, integer_zero_node);
3998 }
3999
4000 if (TREE_CODE (arg1) != SAVE_EXPR
4001 && ((TREE_CODE (arg1) != VAR_DECL
4002 && TREE_CODE (arg1) != PARM_DECL)
4003 || TREE_SIDE_EFFECTS (arg1)))
4004 {
4005 tree lhs = fold (build (code, type, true_value, arg1));
4006 tree rhs = fold (build (code, type, false_value, arg1));
4007
4008 if (TREE_CONSTANT (lhs) || TREE_CONSTANT (rhs)
4009 || TREE_CONSTANT (arg1))
4010 return fold (build (COND_EXPR, type, test, lhs, rhs));
4011
4012 if (current_function_decl != 0)
4013 arg1 = save_expr (arg1);
4014 }
4015
4016 test = fold (build (COND_EXPR, type, test,
4017 fold (build (code, type, true_value, arg1)),
4018 fold (build (code, type, false_value, arg1))));
4019 if (TREE_CODE (arg1) == SAVE_EXPR)
4020 return build (COMPOUND_EXPR, type,
4021 convert (void_type_node, arg1),
4022 strip_compound_expr (test, arg1));
4023 else
4024 return convert (type, test);
4025 }
4026 }
4027 else if (TREE_CODE_CLASS (code) == '<'
4028 && TREE_CODE (arg0) == COMPOUND_EXPR)
4029 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4030 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
4031 else if (TREE_CODE_CLASS (code) == '<'
4032 && TREE_CODE (arg1) == COMPOUND_EXPR)
4033 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
4034 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
4035
4036 switch (code)
4037 {
4038 case INTEGER_CST:
4039 case REAL_CST:
4040 case STRING_CST:
4041 case COMPLEX_CST:
4042 case CONSTRUCTOR:
4043 return t;
4044
4045 case CONST_DECL:
4046 return fold (DECL_INITIAL (t));
4047
4048 case NOP_EXPR:
4049 case FLOAT_EXPR:
4050 case CONVERT_EXPR:
4051 case FIX_TRUNC_EXPR:
4052 /* Other kinds of FIX are not handled properly by fold_convert. */
4053
4054 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
4055 return TREE_OPERAND (t, 0);
4056
4057 /* Handle cases of two conversions in a row. */
4058 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
4059 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
4060 {
4061 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4062 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
4063 tree final_type = TREE_TYPE (t);
4064 int inside_int = INTEGRAL_TYPE_P (inside_type);
4065 int inside_ptr = POINTER_TYPE_P (inside_type);
4066 int inside_float = FLOAT_TYPE_P (inside_type);
4067 int inside_prec = TYPE_PRECISION (inside_type);
4068 int inside_unsignedp = TREE_UNSIGNED (inside_type);
4069 int inter_int = INTEGRAL_TYPE_P (inter_type);
4070 int inter_ptr = POINTER_TYPE_P (inter_type);
4071 int inter_float = FLOAT_TYPE_P (inter_type);
4072 int inter_prec = TYPE_PRECISION (inter_type);
4073 int inter_unsignedp = TREE_UNSIGNED (inter_type);
4074 int final_int = INTEGRAL_TYPE_P (final_type);
4075 int final_ptr = POINTER_TYPE_P (final_type);
4076 int final_float = FLOAT_TYPE_P (final_type);
4077 int final_prec = TYPE_PRECISION (final_type);
4078 int final_unsignedp = TREE_UNSIGNED (final_type);
4079
4080 /* In addition to the cases of two conversions in a row
4081 handled below, if we are converting something to its own
4082 type via an object of identical or wider precision, neither
4083 conversion is needed. */
4084 if (inside_type == final_type
4085 && ((inter_int && final_int) || (inter_float && final_float))
4086 && inter_prec >= final_prec)
4087 return TREE_OPERAND (TREE_OPERAND (t, 0), 0);
4088
4089 /* Likewise, if the intermediate and final types are either both
4090 float or both integer, we don't need the middle conversion if
4091 it is wider than the final type and doesn't change the signedness
4092 (for integers). Avoid this if the final type is a pointer
4093 since then we sometimes need the inner conversion. Likewise if
4094 the outer has a precision not equal to the size of its mode. */
4095 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
4096 || (inter_float && inside_float))
4097 && inter_prec >= inside_prec
4098 && (inter_float || inter_unsignedp == inside_unsignedp)
4099 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
4100 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
4101 && ! final_ptr)
4102 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4103
4104 /* If we have a sign-extension of a zero-extended value, we can
4105 replace that by a single zero-extension. */
4106 if (inside_int && inter_int && final_int
4107 && inside_prec < inter_prec && inter_prec < final_prec
4108 && inside_unsignedp && !inter_unsignedp)
4109 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4110
4111 /* Two conversions in a row are not needed unless:
4112 - some conversion is floating-point (overstrict for now), or
4113 - the intermediate type is narrower than both initial and
4114 final, or
4115 - the intermediate type and innermost type differ in signedness,
4116 and the outermost type is wider than the intermediate, or
4117 - the initial type is a pointer type and the precisions of the
4118 intermediate and final types differ, or
4119 - the final type is a pointer type and the precisions of the
4120 initial and intermediate types differ. */
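/* For instance, `(int) (long) i' with i an int needs neither
   conversion, while `(int) (char) i' must keep the truncation
   through char.  Likewise `(long) (int) c' with c an unsigned char
   is a sign-extension of a zero-extended value, so c can be
   zero-extended to long directly.  */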
4121 if (! inside_float && ! inter_float && ! final_float
4122 && (inter_prec > inside_prec || inter_prec > final_prec)
4123 && ! (inside_int && inter_int
4124 && inter_unsignedp != inside_unsignedp
4125 && inter_prec < final_prec)
4126 && ((inter_unsignedp && inter_prec > inside_prec)
4127 == (final_unsignedp && final_prec > inter_prec))
4128 && ! (inside_ptr && inter_prec != final_prec)
4129 && ! (final_ptr && inside_prec != inter_prec)
4130 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
4131 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
4132 && ! final_ptr)
4133 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4134 }
4135
4136 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
4137 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
4138 /* Detect assigning a bitfield. */
4139 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
4140 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
4141 {
4142 /* Don't leave an assignment inside a conversion
4143 unless assigning a bitfield. */
4144 tree prev = TREE_OPERAND (t, 0);
4145 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
4146 /* First do the assignment, then return converted constant. */
4147 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
4148 TREE_USED (t) = 1;
4149 return t;
4150 }
4151 if (!wins)
4152 {
4153 TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
4154 return t;
4155 }
4156 return fold_convert (t, arg0);
4157
4158 #if 0 /* This loses on &"foo"[0]. */
4159 case ARRAY_REF:
4160 {
4161 int i;
4162
4163 /* Fold an expression like: "foo"[2] */
4164 if (TREE_CODE (arg0) == STRING_CST
4165 && TREE_CODE (arg1) == INTEGER_CST
4166 && !TREE_INT_CST_HIGH (arg1)
4167 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
4168 {
4169 t = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
4170 TREE_TYPE (t) = TREE_TYPE (TREE_TYPE (arg0));
4171 force_fit_type (t, 0);
4172 }
4173 }
4174 return t;
4175 #endif /* 0 */
4176
4177 case COMPONENT_REF:
4178 if (TREE_CODE (arg0) == CONSTRUCTOR)
4179 {
4180 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
4181 if (m)
4182 t = TREE_VALUE (m);
4183 }
4184 return t;
4185
4186 case RANGE_EXPR:
4187 TREE_CONSTANT (t) = wins;
4188 return t;
4189
4190 case NEGATE_EXPR:
4191 if (wins)
4192 {
4193 if (TREE_CODE (arg0) == INTEGER_CST)
4194 {
4195 HOST_WIDE_INT low, high;
4196 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
4197 TREE_INT_CST_HIGH (arg0),
4198 &low, &high);
4199 t = build_int_2 (low, high);
4200 TREE_TYPE (t) = type;
4201 TREE_OVERFLOW (t)
4202 = (TREE_OVERFLOW (arg0)
4203 | force_fit_type (t, overflow && !TREE_UNSIGNED (type)));
4204 TREE_CONSTANT_OVERFLOW (t)
4205 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
4206 }
4207 else if (TREE_CODE (arg0) == REAL_CST)
4208 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
4209 }
4210 else if (TREE_CODE (arg0) == NEGATE_EXPR)
4211 return TREE_OPERAND (arg0, 0);
4212
4213 /* Convert - (a - b) to (b - a) for non-floating-point. */
4214 else if (TREE_CODE (arg0) == MINUS_EXPR && ! FLOAT_TYPE_P (type))
4215 return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
4216 TREE_OPERAND (arg0, 0));
4217
4218 return t;
4219
4220 case ABS_EXPR:
4221 if (wins)
4222 {
4223 if (TREE_CODE (arg0) == INTEGER_CST)
4224 {
4225 if (! TREE_UNSIGNED (type)
4226 && TREE_INT_CST_HIGH (arg0) < 0)
4227 {
4228 HOST_WIDE_INT low, high;
4229 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
4230 TREE_INT_CST_HIGH (arg0),
4231 &low, &high);
4232 t = build_int_2 (low, high);
4233 TREE_TYPE (t) = type;
4234 TREE_OVERFLOW (t)
4235 = (TREE_OVERFLOW (arg0)
4236 | force_fit_type (t, overflow));
4237 TREE_CONSTANT_OVERFLOW (t)
4238 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
4239 }
4240 }
4241 else if (TREE_CODE (arg0) == REAL_CST)
4242 {
4243 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
4244 t = build_real (type,
4245 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
4246 }
4247 }
4248 else if (TREE_CODE (arg0) == ABS_EXPR || TREE_CODE (arg0) == NEGATE_EXPR)
4249 return build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
4250 return t;
4251
4252 case CONJ_EXPR:
4253 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
4254 return arg0;
4255 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
4256 return build (COMPLEX_EXPR, TREE_TYPE (arg0),
4257 TREE_OPERAND (arg0, 0),
4258 fold (build1 (NEGATE_EXPR,
4259 TREE_TYPE (TREE_TYPE (arg0)),
4260 TREE_OPERAND (arg0, 1))));
4261 else if (TREE_CODE (arg0) == COMPLEX_CST)
4262 return build_complex (type, TREE_OPERAND (arg0, 0),
4263 fold (build1 (NEGATE_EXPR,
4264 TREE_TYPE (TREE_TYPE (arg0)),
4265 TREE_OPERAND (arg0, 1))));
4266 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
4267 return fold (build (TREE_CODE (arg0), type,
4268 fold (build1 (CONJ_EXPR, type,
4269 TREE_OPERAND (arg0, 0))),
4270 fold (build1 (CONJ_EXPR,
4271 type, TREE_OPERAND (arg0, 1)))));
4272 else if (TREE_CODE (arg0) == CONJ_EXPR)
4273 return TREE_OPERAND (arg0, 0);
4274 return t;
4275
4276 case BIT_NOT_EXPR:
4277 if (wins)
4278 {
4279 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
4280 ~ TREE_INT_CST_HIGH (arg0));
4281 TREE_TYPE (t) = type;
4282 force_fit_type (t, 0);
4283 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
4284 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
4285 }
4286 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
4287 return TREE_OPERAND (arg0, 0);
4288 return t;
4289
4290 case PLUS_EXPR:
4291 /* A + (-B) -> A - B */
4292 if (TREE_CODE (arg1) == NEGATE_EXPR)
4293 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
4294 else if (! FLOAT_TYPE_P (type))
4295 {
4296 if (integer_zerop (arg1))
4297 return non_lvalue (convert (type, arg0));
4298
4299 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
4300 with a constant, and the two constants have no bits in common,
4301 we should treat this as a BIT_IOR_EXPR since this may produce more
4302 simplifications. */
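/* For instance, `(x & 0xf0) + (y & 0x0f)' can never carry, so it is
   equivalent to `(x & 0xf0) | (y & 0x0f)'.  */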
4303 if (TREE_CODE (arg0) == BIT_AND_EXPR
4304 && TREE_CODE (arg1) == BIT_AND_EXPR
4305 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
4306 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
4307 && integer_zerop (const_binop (BIT_AND_EXPR,
4308 TREE_OPERAND (arg0, 1),
4309 TREE_OPERAND (arg1, 1), 0)))
4310 {
4311 code = BIT_IOR_EXPR;
4312 goto bit_ior;
4313 }
4314
4315 /* (A * C) + (B * C) -> (A+B) * C. Since we are most concerned
4316 about the case where C is a constant, just try one of the
4317 four possibilities. */
4318
4319 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
4320 && operand_equal_p (TREE_OPERAND (arg0, 1),
4321 TREE_OPERAND (arg1, 1), 0))
4322 return fold (build (MULT_EXPR, type,
4323 fold (build (PLUS_EXPR, type,
4324 TREE_OPERAND (arg0, 0),
4325 TREE_OPERAND (arg1, 0))),
4326 TREE_OPERAND (arg0, 1)));
4327 }
4328       /* In IEEE floating point, x+0 may not equal x; e.g. if x is -0.0,
	 x+0 is +0.0.  */
4329 else if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4330 || flag_fast_math)
4331 && real_zerop (arg1))
4332 return non_lvalue (convert (type, arg0));
4333 associate:
4334       /* In most languages, operations on floats cannot be reassociated
4335 	 across parentheses.  Rather than remember where the parentheses
4336 were, we don't associate floats at all. It shouldn't matter much.
4337 However, associating multiplications is only very slightly
4338 inaccurate, so do that if -ffast-math is specified. */
4339 if (FLOAT_TYPE_P (type)
4340 && ! (flag_fast_math && code == MULT_EXPR))
4341 goto binary;
4342
4343 /* The varsign == -1 cases happen only for addition and subtraction.
4344 It says that the arg that was split was really CON minus VAR.
4345 The rest of the code applies to all associative operations. */
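/* For instance, `(10 - x) + 3' splits arg0 into VAR = x, CON = 10
   with varsign == -1 and folds to `13 - x', while `(10 - x) + x'
   folds to just 10.  */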
4346 if (!wins)
4347 {
4348 tree var, con;
4349 int varsign;
4350
4351 if (split_tree (arg0, code, &var, &con, &varsign))
4352 {
4353 if (varsign == -1)
4354 {
4355 /* EXPR is (CON-VAR) +- ARG1. */
4356 /* If it is + and VAR==ARG1, return just CONST. */
4357 if (code == PLUS_EXPR && operand_equal_p (var, arg1, 0))
4358 return convert (TREE_TYPE (t), con);
4359
4360 /* If ARG0 is a constant, don't change things around;
4361 instead keep all the constant computations together. */
4362
4363 if (TREE_CONSTANT (arg0))
4364 return t;
4365
4366 /* Otherwise return (CON +- ARG1) - VAR. */
4367 t = build (MINUS_EXPR, type,
4368 fold (build (code, type, con, arg1)), var);
4369 }
4370 else
4371 {
4372 /* EXPR is (VAR+CON) +- ARG1. */
4373 /* If it is - and VAR==ARG1, return just CONST. */
4374 if (code == MINUS_EXPR && operand_equal_p (var, arg1, 0))
4375 return convert (TREE_TYPE (t), con);
4376
4377 /* If ARG0 is a constant, don't change things around;
4378 instead keep all the constant computations together. */
4379
4380 if (TREE_CONSTANT (arg0))
4381 return t;
4382
4383 /* Otherwise return VAR +- (ARG1 +- CON). */
4384 tem = fold (build (code, type, arg1, con));
4385 t = build (code, type, var, tem);
4386
4387 if (integer_zerop (tem)
4388 && (code == PLUS_EXPR || code == MINUS_EXPR))
4389 return convert (type, var);
4390 /* If we have x +/- (c - d) [c an explicit integer]
4391 change it to x -/+ (d - c) since if d is relocatable
4392 then the latter can be a single immediate insn
4393 and the former cannot. */
4394 if (TREE_CODE (tem) == MINUS_EXPR
4395 && TREE_CODE (TREE_OPERAND (tem, 0)) == INTEGER_CST)
4396 {
4397 tree tem1 = TREE_OPERAND (tem, 1);
4398 TREE_OPERAND (tem, 1) = TREE_OPERAND (tem, 0);
4399 TREE_OPERAND (tem, 0) = tem1;
4400 TREE_SET_CODE (t,
4401 (code == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR));
4402 }
4403 }
4404 return t;
4405 }
4406
4407 if (split_tree (arg1, code, &var, &con, &varsign))
4408 {
4409 if (TREE_CONSTANT (arg1))
4410 return t;
4411
4412 if (varsign == -1)
4413 TREE_SET_CODE (t,
4414 (code == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR));
4415
4416 /* EXPR is ARG0 +- (CON +- VAR). */
4417 if (TREE_CODE (t) == MINUS_EXPR
4418 && operand_equal_p (var, arg0, 0))
4419 {
4420 /* If VAR and ARG0 cancel, return just CON or -CON. */
4421 if (code == PLUS_EXPR)
4422 return convert (TREE_TYPE (t), con);
4423 return fold (build1 (NEGATE_EXPR, TREE_TYPE (t),
4424 convert (TREE_TYPE (t), con)));
4425 }
4426
4427 t = build (TREE_CODE (t), type,
4428 fold (build (code, TREE_TYPE (t), arg0, con)), var);
4429
4430 if (integer_zerop (TREE_OPERAND (t, 0))
4431 && TREE_CODE (t) == PLUS_EXPR)
4432 return convert (TREE_TYPE (t), var);
4433 return t;
4434 }
4435 }
4436 binary:
4437 #if defined (REAL_IS_NOT_DOUBLE) && ! defined (REAL_ARITHMETIC)
4438 if (TREE_CODE (arg1) == REAL_CST)
4439 return t;
4440 #endif /* REAL_IS_NOT_DOUBLE, and no REAL_ARITHMETIC */
4441 if (wins)
4442 t1 = const_binop (code, arg0, arg1, 0);
4443 if (t1 != NULL_TREE)
4444 {
4445 /* The return value should always have
4446 the same type as the original expression. */
4447 if (TREE_TYPE (t1) != TREE_TYPE (t))
4448 t1 = convert (TREE_TYPE (t), t1);
4449
4450 return t1;
4451 }
4452 return t;
4453
4454 case MINUS_EXPR:
4455 if (! FLOAT_TYPE_P (type))
4456 {
4457 if (! wins && integer_zerop (arg0))
4458 return build1 (NEGATE_EXPR, type, arg1);
4459 if (integer_zerop (arg1))
4460 return non_lvalue (convert (type, arg0));
4461
4462 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
4463 about the case where C is a constant, just try one of the
4464 four possibilities. */
4465
4466 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
4467 && operand_equal_p (TREE_OPERAND (arg0, 1),
4468 TREE_OPERAND (arg1, 1), 0))
4469 return fold (build (MULT_EXPR, type,
4470 fold (build (MINUS_EXPR, type,
4471 TREE_OPERAND (arg0, 0),
4472 TREE_OPERAND (arg1, 0))),
4473 TREE_OPERAND (arg0, 1)));
4474 }
4475 /* Convert A - (-B) to A + B. */
4476 else if (TREE_CODE (arg1) == NEGATE_EXPR)
4477 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
4478
4479 else if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4480 || flag_fast_math)
4481 {
4482 /* Except with IEEE floating point, 0-x equals -x. */
4483 if (! wins && real_zerop (arg0))
4484 return build1 (NEGATE_EXPR, type, arg1);
4485 /* Except with IEEE floating point, x-0 equals x. */
4486 if (real_zerop (arg1))
4487 return non_lvalue (convert (type, arg0));
4488 }
4489
4490 /* Fold &x - &x. This can happen from &x.foo - &x.
4491 This is unsafe for certain floats even in non-IEEE formats.
4492 In IEEE, it is unsafe because it does wrong for NaNs.
4493 Also note that operand_equal_p is always false if an operand
4494 is volatile. */
4495
4496 if ((! FLOAT_TYPE_P (type) || flag_fast_math)
4497 && operand_equal_p (arg0, arg1, 0))
4498 return convert (type, integer_zero_node);
4499
4500 goto associate;
4501
4502 case MULT_EXPR:
4503 if (! FLOAT_TYPE_P (type))
4504 {
4505 if (integer_zerop (arg1))
4506 return omit_one_operand (type, arg1, arg0);
4507 if (integer_onep (arg1))
4508 return non_lvalue (convert (type, arg0));
4509
4510 /* ((A / C) * C) is A if the division is an
4511 EXACT_DIV_EXPR. Since C is normally a constant,
4512 just check for one of the four possibilities. */
4513
4514 if (TREE_CODE (arg0) == EXACT_DIV_EXPR
4515 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
4516 return TREE_OPERAND (arg0, 0);
4517
4518 /* (a * (1 << b)) is (a << b) */
4519 if (TREE_CODE (arg1) == LSHIFT_EXPR
4520 && integer_onep (TREE_OPERAND (arg1, 0)))
4521 return fold (build (LSHIFT_EXPR, type, arg0,
4522 TREE_OPERAND (arg1, 1)));
4523 if (TREE_CODE (arg0) == LSHIFT_EXPR
4524 && integer_onep (TREE_OPERAND (arg0, 0)))
4525 return fold (build (LSHIFT_EXPR, type, arg1,
4526 TREE_OPERAND (arg0, 1)));
4527 }
4528 else
4529 {
4530 /* x*0 is 0, except for IEEE floating point. */
4531 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4532 || flag_fast_math)
4533 && real_zerop (arg1))
4534 return omit_one_operand (type, arg1, arg0);
4535 /* In IEEE floating point, x*1 is not equivalent to x for snans.
4536 However, ANSI says we can drop signals,
4537 so we can do this anyway. */
4538 if (real_onep (arg1))
4539 return non_lvalue (convert (type, arg0));
4540 /* x*2 is x+x */
4541 if (! wins && real_twop (arg1) && current_function_decl != 0)
4542 {
4543 tree arg = save_expr (arg0);
4544 return build (PLUS_EXPR, type, arg, arg);
4545 }
4546 }
4547 goto associate;
4548
4549 case BIT_IOR_EXPR:
4550 bit_ior:
4551 {
4552 register enum tree_code code0, code1;
4553
4554 if (integer_all_onesp (arg1))
4555 return omit_one_operand (type, arg1, arg0);
4556 if (integer_zerop (arg1))
4557 return non_lvalue (convert (type, arg0));
4558 t1 = distribute_bit_expr (code, type, arg0, arg1);
4559 if (t1 != NULL_TREE)
4560 return t1;
4561
4562 /* (A << C1) | (A >> C2) if A is unsigned and C1+C2 is the size of A
4563 is a rotate of A by C1 bits. */
4564 /* (A << B) | (A >> (Z - B)) if A is unsigned and Z is the size of A
4565 is a rotate of A by B bits. */
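/* For instance, with a 32-bit unsigned x, `(x << 3) | (x >> 29)' is
   a left-rotate of x by 3 bits, and `(x << n) | (x >> (32 - n))' is
   a left-rotate by n.  */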
4566
4567 code0 = TREE_CODE (arg0);
4568 code1 = TREE_CODE (arg1);
4569 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
4570 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
4571 && operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1,0), 0)
4572 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
4573 {
4574 register tree tree01, tree11;
4575 register enum tree_code code01, code11;
4576
4577 tree01 = TREE_OPERAND (arg0, 1);
4578 tree11 = TREE_OPERAND (arg1, 1);
4579 code01 = TREE_CODE (tree01);
4580 code11 = TREE_CODE (tree11);
4581 if (code01 == INTEGER_CST
4582 && code11 == INTEGER_CST
4583 && TREE_INT_CST_HIGH (tree01) == 0
4584 && TREE_INT_CST_HIGH (tree11) == 0
4585 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
4586 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
4587 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
4588 code0 == LSHIFT_EXPR ? tree01 : tree11);
4589 else if (code11 == MINUS_EXPR
4590 && TREE_CODE (TREE_OPERAND (tree11, 0)) == INTEGER_CST
4591 && TREE_INT_CST_HIGH (TREE_OPERAND (tree11, 0)) == 0
4592 && TREE_INT_CST_LOW (TREE_OPERAND (tree11, 0))
4593 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))
4594 && operand_equal_p (tree01, TREE_OPERAND (tree11, 1), 0))
4595 return build (code0 == LSHIFT_EXPR ? LROTATE_EXPR : RROTATE_EXPR,
4596 type, TREE_OPERAND (arg0, 0), tree01);
4597 else if (code01 == MINUS_EXPR
4598 && TREE_CODE (TREE_OPERAND (tree01, 0)) == INTEGER_CST
4599 && TREE_INT_CST_HIGH (TREE_OPERAND (tree01, 0)) == 0
4600 && TREE_INT_CST_LOW (TREE_OPERAND (tree01, 0))
4601 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))
4602 && operand_equal_p (tree11, TREE_OPERAND (tree01, 1), 0))
4603 return build (code0 != LSHIFT_EXPR ? LROTATE_EXPR : RROTATE_EXPR,
4604 type, TREE_OPERAND (arg0, 0), tree11);
4605 }
4606
4607 goto associate;
4608 }
4609
4610 case BIT_XOR_EXPR:
4611 if (integer_zerop (arg1))
4612 return non_lvalue (convert (type, arg0));
4613 if (integer_all_onesp (arg1))
4614 return fold (build1 (BIT_NOT_EXPR, type, arg0));
4615 goto associate;
4616
4617 case BIT_AND_EXPR:
4618 bit_and:
4619 if (integer_all_onesp (arg1))
4620 return non_lvalue (convert (type, arg0));
4621 if (integer_zerop (arg1))
4622 return omit_one_operand (type, arg1, arg0);
4623 t1 = distribute_bit_expr (code, type, arg0, arg1);
4624 if (t1 != NULL_TREE)
4625 return t1;
4626       /* Simplify ((int)c & 0xff) into (int)c, if c is unsigned char.  */
4627 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == NOP_EXPR
4628 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0))))
4629 {
4630 int prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)));
4631 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
4632 && (~TREE_INT_CST_LOW (arg0)
4633 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
4634 return build1 (NOP_EXPR, type, TREE_OPERAND (arg1, 0));
4635 }
4636 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
4637 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
4638 {
4639 int prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
4640 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
4641 && (~TREE_INT_CST_LOW (arg1)
4642 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
4643 return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
4644 }
4645 goto associate;
4646
4647 case BIT_ANDTC_EXPR:
4648 if (integer_all_onesp (arg0))
4649 return non_lvalue (convert (type, arg1));
4650 if (integer_zerop (arg0))
4651 return omit_one_operand (type, arg0, arg1);
4652 if (TREE_CODE (arg1) == INTEGER_CST)
4653 {
4654 arg1 = fold (build1 (BIT_NOT_EXPR, type, arg1));
4655 code = BIT_AND_EXPR;
4656 goto bit_and;
4657 }
4658 goto binary;
4659
4660 case RDIV_EXPR:
4661 /* In most cases, do nothing with a divide by zero. */
4662 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4663 #ifndef REAL_INFINITY
4664 if (TREE_CODE (arg1) == REAL_CST && real_zerop (arg1))
4665 return t;
4666 #endif
4667 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4668
4669 /* In IEEE floating point, x/1 is not equivalent to x for snans.
4670 However, ANSI says we can drop signals, so we can do this anyway. */
4671 if (real_onep (arg1))
4672 return non_lvalue (convert (type, arg0));
4673
4674 /* If ARG1 is a constant, we can convert this to a multiply by the
4675 reciprocal. This does not have the same rounding properties,
4676 so only do this if -ffast-math. We can actually always safely
4677 do it if ARG1 is a power of two, but it's hard to tell if it is
4678 or not in a portable manner. */
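/* For instance, when optimizing, `x / 2.0' becomes `x * 0.5' since
   0.5 is exact in binary floating point, while `x / 3.0' becomes
   `x * (1.0/3.0)' only under -ffast-math.  */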
4679 if (TREE_CODE (arg1) == REAL_CST)
4680 {
4681 if (flag_fast_math
4682 && 0 != (tem = const_binop (code, build_real (type, dconst1),
4683 arg1, 0)))
4684 return fold (build (MULT_EXPR, type, arg0, tem));
4685 /* Find the reciprocal if optimizing and the result is exact. */
4686 else if (optimize)
4687 {
4688 REAL_VALUE_TYPE r;
4689 r = TREE_REAL_CST (arg1);
4690 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
4691 {
4692 tem = build_real (type, r);
4693 return fold (build (MULT_EXPR, type, arg0, tem));
4694 }
4695 }
4696 }
4697 goto binary;
4698
4699 case TRUNC_DIV_EXPR:
4700 case ROUND_DIV_EXPR:
4701 case FLOOR_DIV_EXPR:
4702 case CEIL_DIV_EXPR:
4703 case EXACT_DIV_EXPR:
4704 if (integer_onep (arg1))
4705 return non_lvalue (convert (type, arg0));
4706 if (integer_zerop (arg1))
4707 return t;
4708
4709 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
4710 operation, EXACT_DIV_EXPR.
4711
4712 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
4713 	 At one time others generated faster code; it's not clear whether they
4714 	 still do after the last round of changes to the DIV code in expmed.c.  */
4715 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
4716 && multiple_of_p (type, arg0, arg1))
4717 return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));
4718
4719       /* If we have ((a / C1) / C2) where both divisions are the same type, try
4720 to simplify. First see if C1 * C2 overflows or not. */
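/* For instance, `(a / 4) / 5' becomes `a / 20', provided 4 * 5 does
   not overflow the type.  */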
4721 if (TREE_CODE (arg0) == code && TREE_CODE (arg1) == INTEGER_CST
4722 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
4723 {
4724 tree new_divisor;
4725
4726 new_divisor = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 1), arg1, 0);
4727 tem = const_binop (FLOOR_DIV_EXPR, new_divisor, arg1, 0);
4728
4729 if (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) == TREE_INT_CST_LOW (tem)
4730 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == TREE_INT_CST_HIGH (tem))
4731 {
4732 /* If no overflow, divide by C1*C2. */
4733 return fold (build (code, type, TREE_OPERAND (arg0, 0), new_divisor));
4734 }
4735 }
4736
4737 /* Look for ((a * C1) / C3) or (((a * C1) + C2) / C3),
4738 where C1 % C3 == 0 or C3 % C1 == 0. We can simplify these
4739 expressions, which often appear in the offsets or sizes of
4740 objects with a varying size. Only deal with positive divisors
4741 and multiplicands. If C2 is negative, we must have C2 % C3 == 0.
4742
4743 Look for NOPs and SAVE_EXPRs inside. */
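/* For instance, `(i * 12 + 8) / 4' simplifies to `i * 3 + 2', since
   both 12 and 8 are divisible by 4.  */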
4744
4745 if (TREE_CODE (arg1) == INTEGER_CST
4746 && tree_int_cst_sgn (arg1) >= 0)
4747 {
4748 int have_save_expr = 0;
4749 tree c2 = integer_zero_node;
4750 tree xarg0 = arg0;
4751
4752 if (TREE_CODE (xarg0) == SAVE_EXPR && SAVE_EXPR_RTL (xarg0) == 0)
4753 have_save_expr = 1, xarg0 = TREE_OPERAND (xarg0, 0);
4754
4755 STRIP_NOPS (xarg0);
4756
4757 /* Look inside the dividend and simplify using EXACT_DIV_EXPR
4758 if possible. */
4759 if (TREE_CODE (xarg0) == MULT_EXPR
4760 && multiple_of_p (type, TREE_OPERAND (xarg0, 0), arg1))
4761 {
4762 tree t;
4763
4764 t = fold (build (MULT_EXPR, type,
4765 fold (build (EXACT_DIV_EXPR, type,
4766 TREE_OPERAND (xarg0, 0), arg1)),
4767 TREE_OPERAND (xarg0, 1)));
4768 if (have_save_expr)
4769 t = save_expr (t);
4770 return t;
4771
4772 }
4773
4774 if (TREE_CODE (xarg0) == MULT_EXPR
4775 && multiple_of_p (type, TREE_OPERAND (xarg0, 1), arg1))
4776 {
4777 tree t;
4778
4779 t = fold (build (MULT_EXPR, type,
4780 fold (build (EXACT_DIV_EXPR, type,
4781 TREE_OPERAND (xarg0, 1), arg1)),
4782 TREE_OPERAND (xarg0, 0)));
4783 if (have_save_expr)
4784 t = save_expr (t);
4785 return t;
4786 }
4787
4788 if (TREE_CODE (xarg0) == PLUS_EXPR
4789 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST)
4790 c2 = TREE_OPERAND (xarg0, 1), xarg0 = TREE_OPERAND (xarg0, 0);
4791 else if (TREE_CODE (xarg0) == MINUS_EXPR
4792 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4793 /* If we are doing this computation unsigned, the negate
4794 is incorrect. */
4795 && ! TREE_UNSIGNED (type))
4796 {
4797 c2 = fold (build1 (NEGATE_EXPR, type, TREE_OPERAND (xarg0, 1)));
4798 xarg0 = TREE_OPERAND (xarg0, 0);
4799 }
4800
4801 if (TREE_CODE (xarg0) == SAVE_EXPR && SAVE_EXPR_RTL (xarg0) == 0)
4802 have_save_expr = 1, xarg0 = TREE_OPERAND (xarg0, 0);
4803
4804 STRIP_NOPS (xarg0);
4805
4806 if (TREE_CODE (xarg0) == MULT_EXPR
4807 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4808 && tree_int_cst_sgn (TREE_OPERAND (xarg0, 1)) >= 0
4809 && (integer_zerop (const_binop (TRUNC_MOD_EXPR,
4810 TREE_OPERAND (xarg0, 1), arg1, 1))
4811 || integer_zerop (const_binop (TRUNC_MOD_EXPR, arg1,
4812 TREE_OPERAND (xarg0, 1), 1)))
4813 && (tree_int_cst_sgn (c2) >= 0
4814 || integer_zerop (const_binop (TRUNC_MOD_EXPR, c2,
4815 arg1, 1))))
4816 {
4817 tree outer_div = integer_one_node;
4818 tree c1 = TREE_OPERAND (xarg0, 1);
4819 tree c3 = arg1;
4820
4821 /* If C3 > C1, set them equal and do a divide by
4822 C3/C1 at the end of the operation. */
4823 if (tree_int_cst_lt (c1, c3))
4824 outer_div = const_binop (code, c3, c1, 0), c3 = c1;
4825
4826 /* The result is A * (C1/C3) + (C2/C3). */
4827 t = fold (build (PLUS_EXPR, type,
4828 fold (build (MULT_EXPR, type,
4829 TREE_OPERAND (xarg0, 0),
4830 const_binop (code, c1, c3, 1))),
4831 const_binop (code, c2, c3, 1)));
4832
4833 if (! integer_onep (outer_div))
4834 t = fold (build (code, type, t, convert (type, outer_div)));
4835
4836 if (have_save_expr)
4837 t = save_expr (t);
4838
4839 return t;
4840 }
4841 }
4842
4843 goto binary;
4844
4845 case CEIL_MOD_EXPR:
4846 case FLOOR_MOD_EXPR:
4847 case ROUND_MOD_EXPR:
4848 case TRUNC_MOD_EXPR:
4849 if (integer_onep (arg1))
4850 return omit_one_operand (type, integer_zero_node, arg0);
4851 if (integer_zerop (arg1))
4852 return t;
4853
4854 /* Look for ((a * C1) % C3) or (((a * C1) + C2) % C3),
4855 where C1 % C3 == 0. Handle similarly to the division case,
4856 but don't bother with SAVE_EXPRs. */
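/* For instance, `(i * 6 + 4) % 3' folds to `4 % 3', i.e. 1,
   assuming the dividend stays nonnegative and does not overflow.  */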
4857
4858 if (TREE_CODE (arg1) == INTEGER_CST
4859 && ! integer_zerop (arg1))
4860 {
4861 tree c2 = integer_zero_node;
4862 tree xarg0 = arg0;
4863
4864 if (TREE_CODE (xarg0) == PLUS_EXPR
4865 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST)
4866 c2 = TREE_OPERAND (xarg0, 1), xarg0 = TREE_OPERAND (xarg0, 0);
4867 else if (TREE_CODE (xarg0) == MINUS_EXPR
4868 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4869 && ! TREE_UNSIGNED (type))
4870 {
4871 c2 = fold (build1 (NEGATE_EXPR, type, TREE_OPERAND (xarg0, 1)));
4872 xarg0 = TREE_OPERAND (xarg0, 0);
4873 }
4874
4875 STRIP_NOPS (xarg0);
4876
4877 if (TREE_CODE (xarg0) == MULT_EXPR
4878 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4879 && integer_zerop (const_binop (TRUNC_MOD_EXPR,
4880 TREE_OPERAND (xarg0, 1),
4881 arg1, 1))
4882 && tree_int_cst_sgn (c2) >= 0)
4883 /* The result is (C2%C3). */
4884 return omit_one_operand (type, const_binop (code, c2, arg1, 1),
4885 TREE_OPERAND (xarg0, 0));
4886 }
4887
4888 goto binary;
4889
4890 case LSHIFT_EXPR:
4891 case RSHIFT_EXPR:
4892 case LROTATE_EXPR:
4893 case RROTATE_EXPR:
4894 if (integer_zerop (arg1))
4895 return non_lvalue (convert (type, arg0));
4896       /* Since a negative shift count is not well-defined,
4897 	 don't try to compute it in the compiler.  */
4898 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
4899 return t;
4900 /* Rewrite an LROTATE_EXPR by a constant into an
4901 RROTATE_EXPR by a new constant. */
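/* For instance, on a 32-bit type, a left-rotate by 3 becomes a
   right-rotate by 29.  */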
4902 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
4903 {
4904 TREE_SET_CODE (t, RROTATE_EXPR);
4905 code = RROTATE_EXPR;
4906 TREE_OPERAND (t, 1) = arg1
4907 = const_binop
4908 (MINUS_EXPR,
4909 convert (TREE_TYPE (arg1),
4910 build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0)),
4911 arg1, 0);
4912 if (tree_int_cst_sgn (arg1) < 0)
4913 return t;
4914 }
4915
4916 /* If we have a rotate of a bit operation with the rotate count and
4917 the second operand of the bit operation both constant,
4918 permute the two operations. */
4919 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
4920 && (TREE_CODE (arg0) == BIT_AND_EXPR
4921 || TREE_CODE (arg0) == BIT_ANDTC_EXPR
4922 || TREE_CODE (arg0) == BIT_IOR_EXPR
4923 || TREE_CODE (arg0) == BIT_XOR_EXPR)
4924 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
4925 return fold (build (TREE_CODE (arg0), type,
4926 fold (build (code, type,
4927 TREE_OPERAND (arg0, 0), arg1)),
4928 fold (build (code, type,
4929 TREE_OPERAND (arg0, 1), arg1))));
4930
4931       /* Two consecutive rotates adding up to the width of the mode
4932 	 cancel each other out.  */
4933 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
4934 && TREE_CODE (arg0) == RROTATE_EXPR
4935 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
4936 && TREE_INT_CST_HIGH (arg1) == 0
4937 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
4938 && ((TREE_INT_CST_LOW (arg1)
4939 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
4940 == GET_MODE_BITSIZE (TYPE_MODE (type))))
4941 return TREE_OPERAND (arg0, 0);
4942
4943 goto binary;
4944
4945 case MIN_EXPR:
4946 if (operand_equal_p (arg0, arg1, 0))
4947 return arg0;
4948 if (INTEGRAL_TYPE_P (type)
4949 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
4950 return omit_one_operand (type, arg1, arg0);
4951 goto associate;
4952
4953 case MAX_EXPR:
4954 if (operand_equal_p (arg0, arg1, 0))
4955 return arg0;
4956 if (INTEGRAL_TYPE_P (type)
4957 && TYPE_MAX_VALUE (type)
4958 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
4959 return omit_one_operand (type, arg1, arg0);
4960 goto associate;
4961
4962 case TRUTH_NOT_EXPR:
4963 /* Note that the operand of this must be an int
4964 and its values must be 0 or 1.
4965 ("true" is a fixed value perhaps depending on the language,
4966 but we don't handle values other than 1 correctly yet.) */
4967 tem = invert_truthvalue (arg0);
4968 /* Avoid infinite recursion. */
4969 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
4970 return t;
4971 return convert (type, tem);
4972
4973 case TRUTH_ANDIF_EXPR:
4974 /* Note that the operands of this must be ints
4975 and their values must be 0 or 1.
4976 ("true" is a fixed value perhaps depending on the language.) */
4977 /* If first arg is constant zero, return it. */
4978 if (integer_zerop (arg0))
4979 return arg0;
4980 case TRUTH_AND_EXPR:
4981 /* If either arg is constant true, drop it. */
4982 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
4983 return non_lvalue (arg1);
4984 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
4985 return non_lvalue (arg0);
4986 /* If second arg is constant zero, result is zero, but first arg
4987 must be evaluated. */
4988 if (integer_zerop (arg1))
4989 return omit_one_operand (type, arg1, arg0);
4990 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
4991 case will be handled here. */
4992 if (integer_zerop (arg0))
4993 return omit_one_operand (type, arg0, arg1);
4994
4995 truth_andor:
4996 /* We only do these simplifications if we are optimizing. */
4997 if (!optimize)
4998 return t;
4999
5000 /* Check for things like (A || B) && (A || C). We can convert this
5001 to A || (B && C). Note that either operator can be any of the four
5002 truth and/or operations and the transformation will still be
5003 valid. Also note that we only care about order for the
5004 ANDIF and ORIF operators. If B contains side effects, this
5005 might change the truth-value of A. */
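/* For instance, `(a || b) && (a || c)' becomes `a || (b && c)' when
   b has no side effects.  */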
5006 if (TREE_CODE (arg0) == TREE_CODE (arg1)
5007 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
5008 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
5009 || TREE_CODE (arg0) == TRUTH_AND_EXPR
5010 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
5011 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
5012 {
5013 tree a00 = TREE_OPERAND (arg0, 0);
5014 tree a01 = TREE_OPERAND (arg0, 1);
5015 tree a10 = TREE_OPERAND (arg1, 0);
5016 tree a11 = TREE_OPERAND (arg1, 1);
5017 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
5018 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
5019 && (code == TRUTH_AND_EXPR
5020 || code == TRUTH_OR_EXPR));
5021
5022 if (operand_equal_p (a00, a10, 0))
5023 return fold (build (TREE_CODE (arg0), type, a00,
5024 fold (build (code, type, a01, a11))));
5025 else if (commutative && operand_equal_p (a00, a11, 0))
5026 return fold (build (TREE_CODE (arg0), type, a00,
5027 fold (build (code, type, a01, a10))));
5028 else if (commutative && operand_equal_p (a01, a10, 0))
5029 return fold (build (TREE_CODE (arg0), type, a01,
5030 fold (build (code, type, a00, a11))));
5031
5032 	  /* This case is tricky because we must either have commutative
5033 	     operators or else A10 must not have side-effects.  */
5034
5035 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
5036 && operand_equal_p (a01, a11, 0))
5037 return fold (build (TREE_CODE (arg0), type,
5038 fold (build (code, type, a00, a10)),
5039 a01));
5040 }
5041
5042 /* See if we can build a range comparison. */
5043 if (0 != (tem = fold_range_test (t)))
5044 return tem;
5045
5046 /* Check for the possibility of merging component references. If our
5047 lhs is another similar operation, try to merge its rhs with our
5048 rhs. Then try to merge our lhs and rhs. */
5049 if (TREE_CODE (arg0) == code
5050 && 0 != (tem = fold_truthop (code, type,
5051 TREE_OPERAND (arg0, 1), arg1)))
5052 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
5053
5054 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
5055 return tem;
5056
5057 return t;
5058
5059 case TRUTH_ORIF_EXPR:
5060 /* Note that the operands of this must be ints
5061 and their values must be 0 or true.
5062 ("true" is a fixed value perhaps depending on the language.) */
5063 /* If first arg is constant true, return it. */
5064 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
5065 return arg0;
5066 case TRUTH_OR_EXPR:
5067 /* If either arg is constant zero, drop it. */
5068 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
5069 return non_lvalue (arg1);
5070 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1))
5071 return non_lvalue (arg0);
5072 /* If second arg is constant true, result is true, but we must
5073 evaluate first arg. */
5074 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
5075 return omit_one_operand (type, arg1, arg0);
5076 /* Likewise for first arg, but note this only occurs here for
5077 TRUTH_OR_EXPR. */
5078 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
5079 return omit_one_operand (type, arg0, arg1);
5080 goto truth_andor;
5081
5082 case TRUTH_XOR_EXPR:
5083 /* If either arg is constant zero, drop it. */
5084 if (integer_zerop (arg0))
5085 return non_lvalue (arg1);
5086 if (integer_zerop (arg1))
5087 return non_lvalue (arg0);
5088 /* If either arg is constant true, this is a logical inversion. */
5089 if (integer_onep (arg0))
5090 return non_lvalue (invert_truthvalue (arg1));
5091 if (integer_onep (arg1))
5092 return non_lvalue (invert_truthvalue (arg0));
5093 return t;
5094
5095 case EQ_EXPR:
5096 case NE_EXPR:
5097 case LT_EXPR:
5098 case GT_EXPR:
5099 case LE_EXPR:
5100 case GE_EXPR:
5101 /* If one arg is a constant integer, put it last. */
5102 if (TREE_CODE (arg0) == INTEGER_CST
5103 && TREE_CODE (arg1) != INTEGER_CST)
5104 {
5105 TREE_OPERAND (t, 0) = arg1;
5106 TREE_OPERAND (t, 1) = arg0;
5107 arg0 = TREE_OPERAND (t, 0);
5108 arg1 = TREE_OPERAND (t, 1);
5109 code = swap_tree_comparison (code);
5110 TREE_SET_CODE (t, code);
5111 }
5112
5113 /* Convert foo++ == CONST into ++foo == CONST + INCR.
5114 First, see if one arg is constant; find the constant arg
5115 and the other one. */
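/* For instance, `i++ == 5' becomes `++i == 6', saving the extra
   copy of the old value of i.  */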
5116 {
5117 tree constop = 0, varop = NULL_TREE;
5118 int constopnum = -1;
5119
5120 if (TREE_CONSTANT (arg1))
5121 constopnum = 1, constop = arg1, varop = arg0;
5122 if (TREE_CONSTANT (arg0))
5123 constopnum = 0, constop = arg0, varop = arg1;
5124
5125 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
5126 {
5127 /* This optimization is invalid for ordered comparisons
5128 if CONST+INCR overflows or if foo+incr might overflow.
5129 This optimization is invalid for floating point due to rounding.
5130 For pointer types we assume overflow doesn't happen. */
5131 if (TREE_CODE (TREE_TYPE (varop)) == POINTER_TYPE
5132 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
5133 && (code == EQ_EXPR || code == NE_EXPR)))
5134 {
5135 tree newconst
5136 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
5137 constop, TREE_OPERAND (varop, 1)));
5138 TREE_SET_CODE (varop, PREINCREMENT_EXPR);
5139
5140 /* If VAROP is a reference to a bitfield, we must mask
5141 the constant by the width of the field. */
5142 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
5143 && DECL_BIT_FIELD(TREE_OPERAND
5144 (TREE_OPERAND (varop, 0), 1)))
5145 {
5146 int size
5147 = TREE_INT_CST_LOW (DECL_SIZE
5148 (TREE_OPERAND
5149 (TREE_OPERAND (varop, 0), 1)));
5150
5151 newconst = fold (build (BIT_AND_EXPR,
5152 TREE_TYPE (varop), newconst,
5153 convert (TREE_TYPE (varop),
5154 build_int_2 (size, 0))));
5155 }
5156
5157
5158 t = build (code, type, TREE_OPERAND (t, 0),
5159 TREE_OPERAND (t, 1));
5160 TREE_OPERAND (t, constopnum) = newconst;
5161 return t;
5162 }
5163 }
5164 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
5165 {
5166 if (TREE_CODE (TREE_TYPE (varop)) == POINTER_TYPE
5167 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
5168 && (code == EQ_EXPR || code == NE_EXPR)))
5169 {
5170 tree newconst
5171 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
5172 constop, TREE_OPERAND (varop, 1)));
5173 TREE_SET_CODE (varop, PREDECREMENT_EXPR);
5174
5175 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
5176 && DECL_BIT_FIELD(TREE_OPERAND
5177 (TREE_OPERAND (varop, 0), 1)))
5178 {
5179 int size
5180 = TREE_INT_CST_LOW (DECL_SIZE
5181 (TREE_OPERAND
5182 (TREE_OPERAND (varop, 0), 1)));
5183
5184 newconst = fold (build (BIT_AND_EXPR,
5185 TREE_TYPE (varop), newconst,
5186 convert (TREE_TYPE (varop),
5187 build_int_2 (((HOST_WIDE_INT) 1 << size) - 1, 0))));
5188 }
5189
5190
5191 t = build (code, type, TREE_OPERAND (t, 0),
5192 TREE_OPERAND (t, 1));
5193 TREE_OPERAND (t, constopnum) = newconst;
5194 return t;
5195 }
5196 }
5197 }
5198
5199 /* Change X >= CST to X > (CST - 1) and X < CST to X <= (CST - 1) if CST is positive. */
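/* E.g., "x >= 1" becomes "x > 0" and "x < 1" becomes "x <= 0";
   comparisons against zero are often cheaper.  */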
5200 if (TREE_CODE (arg1) == INTEGER_CST
5201 && TREE_CODE (arg0) != INTEGER_CST
5202 && tree_int_cst_sgn (arg1) > 0)
5203 {
5204 switch (TREE_CODE (t))
5205 {
5206 case GE_EXPR:
5207 code = GT_EXPR;
5208 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
5209 t = build (code, type, TREE_OPERAND (t, 0), arg1);
5210 break;
5211
5212 case LT_EXPR:
5213 code = LE_EXPR;
5214 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
5215 t = build (code, type, TREE_OPERAND (t, 0), arg1);
5216 break;
5217
5218 default:
5219 break;
5220 }
5221 }
5222
5223 /* If this is an EQ or NE comparison with zero and ARG0 is
5224 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
5225 two operations, but the latter can be done in one less insn
5226 on machines that have only two-operand insns or on which a
5227 constant cannot be the first operand. */
5228 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
5229 && TREE_CODE (arg0) == BIT_AND_EXPR)
5230 {
5231 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
5232 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
5233 return
5234 fold (build (code, type,
5235 build (BIT_AND_EXPR, TREE_TYPE (arg0),
5236 build (RSHIFT_EXPR,
5237 TREE_TYPE (TREE_OPERAND (arg0, 0)),
5238 TREE_OPERAND (arg0, 1),
5239 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
5240 convert (TREE_TYPE (arg0),
5241 integer_one_node)),
5242 arg1));
5243 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
5244 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
5245 return
5246 fold (build (code, type,
5247 build (BIT_AND_EXPR, TREE_TYPE (arg0),
5248 build (RSHIFT_EXPR,
5249 TREE_TYPE (TREE_OPERAND (arg0, 1)),
5250 TREE_OPERAND (arg0, 0),
5251 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
5252 convert (TREE_TYPE (arg0),
5253 integer_one_node)),
5254 arg1));
5255 }
5256
5257 /* If this is an NE or EQ comparison of zero against the result of a
5258 signed MOD operation whose second operand is a power of 2, make
5259 the MOD operation unsigned since it is simpler and equivalent. */
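/* E.g., "(int) x % 4 == 0" becomes "(unsigned int) x % 4 == 0";
   with a power-of-2 divisor both forms are zero for exactly the
   same values of X.  */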
5260 if ((code == NE_EXPR || code == EQ_EXPR)
5261 && integer_zerop (arg1)
5262 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
5263 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
5264 || TREE_CODE (arg0) == CEIL_MOD_EXPR
5265 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
5266 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
5267 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5268 {
5269 tree newtype = unsigned_type (TREE_TYPE (arg0));
5270 tree newmod = build (TREE_CODE (arg0), newtype,
5271 convert (newtype, TREE_OPERAND (arg0, 0)),
5272 convert (newtype, TREE_OPERAND (arg0, 1)));
5273
5274 return build (code, type, newmod, convert (newtype, arg1));
5275 }
5276
5277 /* If this is an NE comparison of zero with an AND of one, remove the
5278 comparison since the AND will give the correct value. */
5279 if (code == NE_EXPR && integer_zerop (arg1)
5280 && TREE_CODE (arg0) == BIT_AND_EXPR
5281 && integer_onep (TREE_OPERAND (arg0, 1)))
5282 return convert (type, arg0);
5283
5284 /* If we have (A & C) == C where C is a power of 2, convert this into
5285 (A & C) != 0. Similarly for NE_EXPR. */
5286 if ((code == EQ_EXPR || code == NE_EXPR)
5287 && TREE_CODE (arg0) == BIT_AND_EXPR
5288 && integer_pow2p (TREE_OPERAND (arg0, 1))
5289 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
5290 return build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
5291 arg0, integer_zero_node);
5292
5293 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
5294 and similarly for >= into !=. */
5295 if ((code == LT_EXPR || code == GE_EXPR)
5296 && TREE_UNSIGNED (TREE_TYPE (arg0))
5297 && TREE_CODE (arg1) == LSHIFT_EXPR
5298 && integer_onep (TREE_OPERAND (arg1, 0)))
5299 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
5300 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
5301 TREE_OPERAND (arg1, 1)),
5302 convert (TREE_TYPE (arg0), integer_zero_node));
5303
5304 else if ((code == LT_EXPR || code == GE_EXPR)
5305 && TREE_UNSIGNED (TREE_TYPE (arg0))
5306 && (TREE_CODE (arg1) == NOP_EXPR
5307 || TREE_CODE (arg1) == CONVERT_EXPR)
5308 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
5309 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
5310 return
5311 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
5312 convert (TREE_TYPE (arg0),
5313 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
5314 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
5315 convert (TREE_TYPE (arg0), integer_zero_node));
5316
5317 /* Simplify comparison of something with itself. (For IEEE
5318 floating-point, we can only do some of these simplifications.) */
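/* E.g., "x == x" folds to 1 only for integral operands: for IEEE
   floating point it is false when X is a NaN, so the EQ/GE/LE cases
   below merely canonicalize to EQ_EXPR, NE is left alone (NaN != NaN
   is true), and GT/LT fold to 0, which is false for NaNs as well.  */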
5319 if (operand_equal_p (arg0, arg1, 0))
5320 {
5321 switch (code)
5322 {
5323 case EQ_EXPR:
5324 case GE_EXPR:
5325 case LE_EXPR:
5326 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5327 {
5328 if (type == integer_type_node)
5329 return integer_one_node;
5330
5331 t = build_int_2 (1, 0);
5332 TREE_TYPE (t) = type;
5333 return t;
5334 }
5335 code = EQ_EXPR;
5336 TREE_SET_CODE (t, code);
5337 break;
5338
5339 case NE_EXPR:
5340 /* For NE, we can only do this simplification if integer. */
5341 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5342 break;
5343 /* ... fall through ... */
5344 case GT_EXPR:
5345 case LT_EXPR:
5346 if (type == integer_type_node)
5347 return integer_zero_node;
5348
5349 t = build_int_2 (0, 0);
5350 TREE_TYPE (t) = type;
5351 return t;
5352 default:
5353 abort ();
5354 }
5355 }
5356
5357 /* An unsigned comparison against 0 can be simplified. */
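/* No unsigned value is below zero, so "x > 0" becomes "x != 0",
   "x <= 0" becomes "x == 0", "x >= 0" folds to 1, and "x < 0"
   folds to 0.  */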
5358 if (integer_zerop (arg1)
5359 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
5360 || TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE)
5361 && TREE_UNSIGNED (TREE_TYPE (arg1)))
5362 {
5363 switch (TREE_CODE (t))
5364 {
5365 case GT_EXPR:
5366 code = NE_EXPR;
5367 TREE_SET_CODE (t, NE_EXPR);
5368 break;
5369 case LE_EXPR:
5370 code = EQ_EXPR;
5371 TREE_SET_CODE (t, EQ_EXPR);
5372 break;
5373 case GE_EXPR:
5374 return omit_one_operand (type,
5375 convert (type, integer_one_node),
5376 arg0);
5377 case LT_EXPR:
5378 return omit_one_operand (type,
5379 convert (type, integer_zero_node),
5380 arg0);
5381 default:
5382 break;
5383 }
5384 }
5385
5386 /* An unsigned <= 0x7fffffff can be simplified. */
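/* For 32-bit operands, "(unsigned) x <= 0x7fffffff" holds exactly
   when "(int) x >= 0", and "(unsigned) x > 0x7fffffff" exactly when
   "(int) x < 0"; the signed comparisons against zero are usually
   cheaper.  */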
5387 {
5388 int width = TYPE_PRECISION (TREE_TYPE (arg1));
5389 if (TREE_CODE (arg1) == INTEGER_CST
5390 && ! TREE_CONSTANT_OVERFLOW (arg1)
5391 && width <= HOST_BITS_PER_WIDE_INT
5392 && TREE_INT_CST_LOW (arg1) == ((HOST_WIDE_INT) 1 << (width - 1)) - 1
5393 && TREE_INT_CST_HIGH (arg1) == 0
5394 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
5395 || TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE)
5396 && TREE_UNSIGNED (TREE_TYPE (arg1)))
5397 {
5398 switch (TREE_CODE (t))
5399 {
5400 case LE_EXPR:
5401 return fold (build (GE_EXPR, type,
5402 convert (signed_type (TREE_TYPE (arg0)),
5403 arg0),
5404 convert (signed_type (TREE_TYPE (arg1)),
5405 integer_zero_node)));
5406 case GT_EXPR:
5407 return fold (build (LT_EXPR, type,
5408 convert (signed_type (TREE_TYPE (arg0)),
5409 arg0),
5410 convert (signed_type (TREE_TYPE (arg1)),
5411 integer_zero_node)));
5412 default:
5413 break;
5414 }
5415 }
5416 }
5417
5418 /* If we are comparing an expression that just has comparisons
5419 of two integer values, arithmetic expressions of those comparisons,
5420 and constants, we can simplify it. There are only three cases
5421 to check: the two values can either be equal, the first can be
5422 greater, or the second can be greater. Fold the expression for
5423 those three values. Since each value must be 0 or 1, we have
5424 eight possibilities, each of which corresponds to the constant 0
5425 or 1 or one of the six possible comparisons.
5426
5427 This handles common cases like (a > b) == 0 but also handles
5428 expressions like ((x > y) - (y > x)) > 0, which supposedly
5429 occur in macroized code. */
5430
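/* For instance, in "(a > b) == 0" the two values are A and B;
   evaluating the expression at (A>B, A==B, A<B) gives the results
   (0, 1, 1), which is the mask for LE_EXPR below, so the whole
   expression folds to "a <= b".  */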
5431 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
5432 {
5433 tree cval1 = 0, cval2 = 0;
5434 int save_p = 0;
5435
5436 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
5437 /* Don't handle degenerate cases here; they should already
5438 have been handled anyway. */
5439 && cval1 != 0 && cval2 != 0
5440 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
5441 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
5442 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
5443 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
5444 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
5445 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
5446 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
5447 {
5448 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
5449 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
5450
5451 /* We can't just pass T to eval_subst in case cval1 or cval2
5452 was the same as ARG1. */
5453
5454 tree high_result
5455 = fold (build (code, type,
5456 eval_subst (arg0, cval1, maxval, cval2, minval),
5457 arg1));
5458 tree equal_result
5459 = fold (build (code, type,
5460 eval_subst (arg0, cval1, maxval, cval2, maxval),
5461 arg1));
5462 tree low_result
5463 = fold (build (code, type,
5464 eval_subst (arg0, cval1, minval, cval2, maxval),
5465 arg1));
5466
5467 /* All three of these results should be 0 or 1. Confirm they
5468 are. Then use those values to select the proper code
5469 to use. */
5470
5471 if ((integer_zerop (high_result)
5472 || integer_onep (high_result))
5473 && (integer_zerop (equal_result)
5474 || integer_onep (equal_result))
5475 && (integer_zerop (low_result)
5476 || integer_onep (low_result)))
5477 {
5478 /* Make a 3-bit mask with the high-order bit being the
5479 value for `>', the next for `=', and the low for `<'. */
5480 switch ((integer_onep (high_result) * 4)
5481 + (integer_onep (equal_result) * 2)
5482 + integer_onep (low_result))
5483 {
5484 case 0:
5485 /* Always false. */
5486 return omit_one_operand (type, integer_zero_node, arg0);
5487 case 1:
5488 code = LT_EXPR;
5489 break;
5490 case 2:
5491 code = EQ_EXPR;
5492 break;
5493 case 3:
5494 code = LE_EXPR;
5495 break;
5496 case 4:
5497 code = GT_EXPR;
5498 break;
5499 case 5:
5500 code = NE_EXPR;
5501 break;
5502 case 6:
5503 code = GE_EXPR;
5504 break;
5505 case 7:
5506 /* Always true. */
5507 return omit_one_operand (type, integer_one_node, arg0);
5508 }
5509
5510 t = build (code, type, cval1, cval2);
5511 if (save_p)
5512 return save_expr (t);
5513 else
5514 return fold (t);
5515 }
5516 }
5517 }
5518
5519 /* If this is a comparison of a field, we may be able to simplify it. */
5520 if ((TREE_CODE (arg0) == COMPONENT_REF
5521 || TREE_CODE (arg0) == BIT_FIELD_REF)
5522 && (code == EQ_EXPR || code == NE_EXPR)
5523 /* Handle the constant case even without -O
5524 to make sure the warnings are given. */
5525 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
5526 {
5527 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
5528 return t1 ? t1 : t;
5529 }
5530
5531 /* If this is a comparison of complex values and either or both
5532 sides are a COMPLEX_EXPR, it is best to split up the comparisons
5533 and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR. This
5534 may prevent needless evaluations. */
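/* E.g., "COMPLEX_EXPR <a, b> == z" becomes
   "a == REALPART <z> && b == IMAGPART <z>" (NE uses ||), so a part
   that folds to a constant can short-circuit the other comparison.  */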
5535 if ((code == EQ_EXPR || code == NE_EXPR)
5536 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
5537 && (TREE_CODE (arg0) == COMPLEX_EXPR
5538 || TREE_CODE (arg1) == COMPLEX_EXPR))
5539 {
5540 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
5541 tree real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
5542 tree imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
5543 tree real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
5544 tree imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
5545
5546 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
5547 : TRUTH_ORIF_EXPR),
5548 type,
5549 fold (build (code, type, real0, real1)),
5550 fold (build (code, type, imag0, imag1))));
5551 }
5552
5553 /* From here on, the only cases we handle are when the result is
5554 known to be a constant.
5555
5556 To compute GT, swap the arguments and do LT.
5557 To compute GE, do LT and invert the result.
5558 To compute LE, swap the arguments, do LT and invert the result.
5559 To compute NE, do EQ and invert the result.
5560
5561 Therefore, the code below must handle only EQ and LT. */
5562
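/* E.g., a constant "3 > 5" is rewritten as "5 < 3" and handled by
   the LT code; "x != y" on constants is computed as EQ and then
   inverted.  */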
5563 if (code == LE_EXPR || code == GT_EXPR)
5564 {
5565 tem = arg0, arg0 = arg1, arg1 = tem;
5566 code = swap_tree_comparison (code);
5567 }
5568
5569 /* Note that it is safe to invert for real values here because we
5570 will check below in the one case that it matters. */
5571
5572 invert = 0;
5573 if (code == NE_EXPR || code == GE_EXPR)
5574 {
5575 invert = 1;
5576 code = invert_tree_comparison (code);
5577 }
5578
5579 /* Compute a result for LT or EQ if args permit;
5580 otherwise return T. */
5581 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
5582 {
5583 if (code == EQ_EXPR)
5584 t1 = build_int_2 ((TREE_INT_CST_LOW (arg0)
5585 == TREE_INT_CST_LOW (arg1))
5586 && (TREE_INT_CST_HIGH (arg0)
5587 == TREE_INT_CST_HIGH (arg1)),
5588 0);
5589 else
5590 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
5591 ? INT_CST_LT_UNSIGNED (arg0, arg1)
5592 : INT_CST_LT (arg0, arg1)),
5593 0);
5594 }
5595
5596 #if 0 /* This is no longer useful, but breaks some real code. */
5597 /* Assume a nonexplicit constant cannot equal an explicit one,
5598 since such code would be undefined anyway.
5599 Exception: on sysvr4, using #pragma weak,
5600 a label can come out as 0. */
5601 else if (TREE_CODE (arg1) == INTEGER_CST
5602 && !integer_zerop (arg1)
5603 && TREE_CONSTANT (arg0)
5604 && TREE_CODE (arg0) == ADDR_EXPR
5605 && code == EQ_EXPR)
5606 t1 = build_int_2 (0, 0);
5607 #endif
5608 /* Two real constants can be compared explicitly. */
5609 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
5610 {
5611 /* If either operand is a NaN, the result is false with two
5612 exceptions: First, an NE_EXPR is true on NaNs, but that case
5613 is already handled correctly since we will be inverting the
5614 result for NE_EXPR. Second, if we had inverted a LE_EXPR
5615 or a GE_EXPR into a LT_EXPR, we must return true so that it
5616 will be inverted into false. */
5617
5618 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
5619 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
5620 t1 = build_int_2 (invert && code == LT_EXPR, 0);
5621
5622 else if (code == EQ_EXPR)
5623 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
5624 TREE_REAL_CST (arg1)),
5625 0);
5626 else
5627 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
5628 TREE_REAL_CST (arg1)),
5629 0);
5630 }
5631
5632 if (t1 == NULL_TREE)
5633 return t;
5634
5635 if (invert)
5636 TREE_INT_CST_LOW (t1) ^= 1;
5637
5638 TREE_TYPE (t1) = type;
5639 if (TREE_CODE (type) == BOOLEAN_TYPE)
5640 return truthvalue_conversion (t1);
5641 return t1;
5642
5643 case COND_EXPR:
5644 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
5645 so all simple results must be passed through pedantic_non_lvalue. */
5646 if (TREE_CODE (arg0) == INTEGER_CST)
5647 return pedantic_non_lvalue
5648 (TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
5649 else if (operand_equal_p (arg1, TREE_OPERAND (t, 2), 0))
5650 return pedantic_omit_one_operand (type, arg1, arg0);
5651
5652 /* If the second operand is zero, invert the comparison and swap
5653 the second and third operands. Likewise if the second operand
5654 is constant and the third is not or if the third operand is
5655 equivalent to the first operand of the comparison. */
5656
5657 if (integer_zerop (arg1)
5658 || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
5659 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
5660 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
5661 TREE_OPERAND (t, 2),
5662 TREE_OPERAND (arg0, 1))))
5663 {
5664 /* See if this can be inverted. If it can't, possibly because
5665 it was a floating-point inequality comparison, don't do
5666 anything. */
5667 tem = invert_truthvalue (arg0);
5668
5669 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
5670 {
5671 t = build (code, type, tem,
5672 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
5673 arg0 = tem;
5674 arg1 = TREE_OPERAND (t, 2);
5675 STRIP_NOPS (arg1);
5676 }
5677 }
5678
5679 /* If we have A op B ? A : C, we may be able to convert this to a
5680 simpler expression, depending on the operation and the values
5681 of B and C. IEEE floating point prevents this though,
5682 because A or B might be -0.0 or a NaN. */
5683
5684 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
5685 && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5686 || ! FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0)))
5687 || flag_fast_math)
5688 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
5689 arg1, TREE_OPERAND (arg0, 1)))
5690 {
5691 tree arg2 = TREE_OPERAND (t, 2);
5692 enum tree_code comp_code = TREE_CODE (arg0);
5693
5694 STRIP_NOPS (arg2);
5695
5696 /* If we have A op 0 ? A : -A, this is A, -A, abs (A), or abs (-A),
5697 depending on the comparison operation. */
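/* E.g., "x >= 0 ? x : -x" folds to abs (x), "x <= 0 ? x : -x" to
   -abs (x), "x != 0 ? x : -x" to plain X, and "x == 0 ? x : -x" to
   -x, since both arms agree when X is zero.  */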
5698 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
5699 ? real_zerop (TREE_OPERAND (arg0, 1))
5700 : integer_zerop (TREE_OPERAND (arg0, 1)))
5701 && TREE_CODE (arg2) == NEGATE_EXPR
5702 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5703 switch (comp_code)
5704 {
5705 case EQ_EXPR:
5706 return pedantic_non_lvalue
5707 (fold (build1 (NEGATE_EXPR, type, arg1)));
5708 case NE_EXPR:
5709 return pedantic_non_lvalue (convert (type, arg1));
5710 case GE_EXPR:
5711 case GT_EXPR:
5712 return pedantic_non_lvalue
5713 (convert (type, fold (build1 (ABS_EXPR,
5714 TREE_TYPE (arg1), arg1))));
5715 case LE_EXPR:
5716 case LT_EXPR:
5717 return pedantic_non_lvalue
5718 (fold (build1 (NEGATE_EXPR, type,
5719 convert (type,
5720 fold (build1 (ABS_EXPR,
5721 TREE_TYPE (arg1),
5722 arg1))))));
5723 default:
5724 abort ();
5725 }
5726
5727 /* If this is A != 0 ? A : 0, this is simply A. For ==, it is
5728 always zero. */
5729
5730 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
5731 {
5732 if (comp_code == NE_EXPR)
5733 return pedantic_non_lvalue (convert (type, arg1));
5734 else if (comp_code == EQ_EXPR)
5735 return pedantic_non_lvalue (convert (type, integer_zero_node));
5736 }
5737
5738 /* If this is A op B ? A : B, this is either A, B, min (A, B),
5739 or max (A, B), depending on the operation. */
5740
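/* E.g., "a < b ? a : b" folds to min (a, b) and "a > b ? a : b" to
   max (a, b); for EQ the result is simply B, and for NE simply A.  */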
5741 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
5742 arg2, TREE_OPERAND (arg0, 0)))
5743 {
5744 tree comp_op0 = TREE_OPERAND (arg0, 0);
5745 tree comp_op1 = TREE_OPERAND (arg0, 1);
5746 tree comp_type = TREE_TYPE (comp_op0);
5747
5748 switch (comp_code)
5749 {
5750 case EQ_EXPR:
5751 return pedantic_non_lvalue (convert (type, arg2));
5752 case NE_EXPR:
5753 return pedantic_non_lvalue (convert (type, arg1));
5754 case LE_EXPR:
5755 case LT_EXPR:
5756 /* In C++ a ?: expression can be an lvalue, so put the
5757 operand which will be used if they are equal first
5758 so that we can convert this back to the
5759 corresponding COND_EXPR. */
5760 return pedantic_non_lvalue
5761 (convert (type, (fold (build (MIN_EXPR, comp_type,
5762 (comp_code == LE_EXPR
5763 ? comp_op0 : comp_op1),
5764 (comp_code == LE_EXPR
5765 ? comp_op1 : comp_op0))))));
5766 break;
5767 case GE_EXPR:
5768 case GT_EXPR:
5769 return pedantic_non_lvalue
5770 (convert (type, fold (build (MAX_EXPR, comp_type,
5771 (comp_code == GE_EXPR
5772 ? comp_op0 : comp_op1),
5773 (comp_code == GE_EXPR
5774 ? comp_op1 : comp_op0)))));
5775 break;
5776 default:
5777 abort ();
5778 }
5779 }
5780
5781 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5782 we might still be able to simplify this. For example,
5783 if C1 is one less or one more than C2, this might have started
5784 out as a MIN or MAX and been transformed by this function.
5785 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5786
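/* E.g., "x < 4 ? x : 3" is min (x, 3), and "x > 2 ? x : 3" is
   max (x, 3): in each case the constant arm differs from the
   comparison bound by exactly one.  */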
5787 if (INTEGRAL_TYPE_P (type)
5788 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5789 && TREE_CODE (arg2) == INTEGER_CST)
5790 switch (comp_code)
5791 {
5792 case EQ_EXPR:
5793 /* We can replace A with C1 in this case. */
5794 arg1 = convert (type, TREE_OPERAND (arg0, 1));
5795 t = build (code, type, TREE_OPERAND (t, 0), arg1,
5796 TREE_OPERAND (t, 2));
5797 break;
5798
5799 case LT_EXPR:
5800 /* If C1 is C2 + 1, this is min(A, C2). */
5801 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
5802 && operand_equal_p (TREE_OPERAND (arg0, 1),
5803 const_binop (PLUS_EXPR, arg2,
5804 integer_one_node, 0), 1))
5805 return pedantic_non_lvalue
5806 (fold (build (MIN_EXPR, type, arg1, arg2)));
5807 break;
5808
5809 case LE_EXPR:
5810 /* If C1 is C2 - 1, this is min(A, C2). */
5811 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
5812 && operand_equal_p (TREE_OPERAND (arg0, 1),
5813 const_binop (MINUS_EXPR, arg2,
5814 integer_one_node, 0), 1))
5815 return pedantic_non_lvalue
5816 (fold (build (MIN_EXPR, type, arg1, arg2)));
5817 break;
5818
5819 case GT_EXPR:
5820 /* If C1 is C2 - 1, this is max(A, C2). */
5821 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
5822 && operand_equal_p (TREE_OPERAND (arg0, 1),
5823 const_binop (MINUS_EXPR, arg2,
5824 integer_one_node, 0), 1))
5825 return pedantic_non_lvalue
5826 (fold (build (MAX_EXPR, type, arg1, arg2)));
5827 break;
5828
5829 case GE_EXPR:
5830 /* If C1 is C2 + 1, this is max(A, C2). */
5831 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
5832 && operand_equal_p (TREE_OPERAND (arg0, 1),
5833 const_binop (PLUS_EXPR, arg2,
5834 integer_one_node, 0), 1))
5835 return pedantic_non_lvalue
5836 (fold (build (MAX_EXPR, type, arg1, arg2)));
5837 break;
5838 case NE_EXPR:
5839 break;
5840 default:
5841 abort ();
5842 }
5843 }
5844
5845 /* If the second operand is simpler than the third, swap them
5846 since that produces better jump optimization results. */
5847 if ((TREE_CONSTANT (arg1) || TREE_CODE_CLASS (TREE_CODE (arg1)) == 'd'
5848 || TREE_CODE (arg1) == SAVE_EXPR)
5849 && ! (TREE_CONSTANT (TREE_OPERAND (t, 2))
5850 || TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (t, 2))) == 'd'
5851 || TREE_CODE (TREE_OPERAND (t, 2)) == SAVE_EXPR))
5852 {
5853 /* See if this can be inverted. If it can't, possibly because
5854 it was a floating-point inequality comparison, don't do
5855 anything. */
5856 tem = invert_truthvalue (arg0);
5857
5858 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
5859 {
5860 t = build (code, type, tem,
5861 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
5862 arg0 = tem;
5863 arg1 = TREE_OPERAND (t, 2);
5864 STRIP_NOPS (arg1);
5865 }
5866 }
5867
5868 /* Convert A ? 1 : 0 to simply A. */
5869 if (integer_onep (TREE_OPERAND (t, 1))
5870 && integer_zerop (TREE_OPERAND (t, 2))
5871 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
5872 call to fold will try to move the conversion inside
5873 a COND, which will recurse. In that case, the COND_EXPR
5874 is probably the best choice, so leave it alone. */
5875 && type == TREE_TYPE (arg0))
5876 return pedantic_non_lvalue (arg0);
5877
5878 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
5879 operation is simply A & 2. */
5880
5881 if (integer_zerop (TREE_OPERAND (t, 2))
5882 && TREE_CODE (arg0) == NE_EXPR
5883 && integer_zerop (TREE_OPERAND (arg0, 1))
5884 && integer_pow2p (arg1)
5885 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
5886 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
5887 arg1, 1))
5888 return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));
5889
5890 return t;
5891
5892 case COMPOUND_EXPR:
5893 /* When pedantic, a compound expression can be neither an lvalue
5894 nor an integer constant expression. */
5895 if (TREE_SIDE_EFFECTS (arg0) || pedantic)
5896 return t;
5897 /* Don't let (0, 0) be a null pointer constant; wrapping the zero in a NON_LVALUE_EXPR keeps it from being an integer constant expression. */
5898 if (integer_zerop (arg1))
5899 return non_lvalue (arg1);
5900 return arg1;
5901
5902 case COMPLEX_EXPR:
5903 if (wins)
5904 return build_complex (type, arg0, arg1);
5905 return t;
5906
5907 case REALPART_EXPR:
5908 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5909 return t;
5910 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5911 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
5912 TREE_OPERAND (arg0, 1));
5913 else if (TREE_CODE (arg0) == COMPLEX_CST)
5914 return TREE_REALPART (arg0);
5915 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5916 return fold (build (TREE_CODE (arg0), type,
5917 fold (build1 (REALPART_EXPR, type,
5918 TREE_OPERAND (arg0, 0))),
5919 fold (build1 (REALPART_EXPR,
5920 type, TREE_OPERAND (arg0, 1)))));
5921 return t;
5922
5923 case IMAGPART_EXPR:
5924 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5925 return convert (type, integer_zero_node);
5926 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5927 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
5928 TREE_OPERAND (arg0, 0));
5929 else if (TREE_CODE (arg0) == COMPLEX_CST)
5930 return TREE_IMAGPART (arg0);
5931 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5932 return fold (build (TREE_CODE (arg0), type,
5933 fold (build1 (IMAGPART_EXPR, type,
5934 TREE_OPERAND (arg0, 0))),
5935 fold (build1 (IMAGPART_EXPR, type,
5936 TREE_OPERAND (arg0, 1)))));
5937 return t;
5938
5939 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
5940 appropriate. */
5941 case CLEANUP_POINT_EXPR:
5942 if (! has_cleanups (arg0))
5943 return TREE_OPERAND (t, 0);
5944
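/* E.g., CLEANUP_POINT_EXPR <c + f ()> with C constant becomes
   c + CLEANUP_POINT_EXPR <f ()>: the cleanup point need only
   enclose the operand that can actually produce cleanups.  */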
5945 {
5946 enum tree_code code0 = TREE_CODE (arg0);
5947 int kind0 = TREE_CODE_CLASS (code0);
5948 tree arg00 = TREE_OPERAND (arg0, 0);
5949 tree arg01;
5950
5951 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
5952 return fold (build1 (code0, type,
5953 fold (build1 (CLEANUP_POINT_EXPR,
5954 TREE_TYPE (arg00), arg00))));
5955
5956 if (kind0 == '<' || kind0 == '2'
5957 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
5958 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
5959 || code0 == TRUTH_XOR_EXPR)
5960 {
5961 arg01 = TREE_OPERAND (arg0, 1);
5962
5963 if (TREE_CONSTANT (arg00)
5964 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
5965 && ! has_cleanups (arg00)))
5966 return fold (build (code0, type, arg00,
5967 fold (build1 (CLEANUP_POINT_EXPR,
5968 TREE_TYPE (arg01), arg01))));
5969
5970 if (TREE_CONSTANT (arg01))
5971 return fold (build (code0, type,
5972 fold (build1 (CLEANUP_POINT_EXPR,
5973 TREE_TYPE (arg00), arg00)),
5974 arg01));
5975 }
5976
5977 return t;
5978 }
5979
5980 default:
5981 return t;
5982 } /* switch (code) */
5983 }
5984
5985 /* Determine if first argument is a multiple of second argument.
5986 Return 0 if it is not, or if that cannot easily be determined.
5987
5988 An example of the sort of thing we care about (at this point --
5989 this routine could surely be made more general, and expanded
5990 to do what the *_DIV_EXPR's fold() cases do now) is discovering
5991 that
5992
5993 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
5994
5995 is a multiple of
5996
5997 SAVE_EXPR (J * 8)
5998
5999 when we know that the two `SAVE_EXPR (J * 8)' nodes are the
6000 same node (which means they will have the same value at run
6001 time, even though we don't know when they'll be assigned).
6002
6003 This code also handles discovering that
6004
6005 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
6006
6007 is a multiple of
6008
6009 8
6010
6011 (of course) so we don't have to worry about dealing with a
6012 possible remainder.
6013
6014 Note that we _look_ inside a SAVE_EXPR only to determine
6015 how it was calculated; it is not safe for fold() to do much
6016 of anything else with the internals of a SAVE_EXPR, since
6017 fold() cannot know when it will be evaluated at run time.
6018 For example, the latter example above _cannot_ be implemented
6019 as
6020
6021 SAVE_EXPR (I) * J
6022
6023 or any variant thereof, since the value of J at evaluation time
6024 of the original SAVE_EXPR is not necessarily the same at the time
6025 the new expression is evaluated. The only optimization of this
6026 sort that would be valid is changing
6027
6028 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
6029 divided by
6030 8
6031
6032 to
6033
6034 SAVE_EXPR (I) * SAVE_EXPR (J)
6035
6036 (where the same SAVE_EXPR (J) is used in the original and the
6037 transformed version). */
6038
6039 static int
6040 multiple_of_p (type, top, bottom)
6041 tree type;
6042 tree top;
6043 tree bottom;
6044 {
6045 if (operand_equal_p (top, bottom, 0))
6046 return 1;
6047
6048 if (TREE_CODE (type) != INTEGER_TYPE)
6049 return 0;
6050
6051 switch (TREE_CODE (top))
6052 {
6053 case MULT_EXPR:
6054 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
6055 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
6056
6057 case PLUS_EXPR:
6058 case MINUS_EXPR:
6059 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
6060 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
6061
6062 case NOP_EXPR:
6063 /* Punt if conversion from non-integral or wider integral type. */
6064 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
6065 || (TYPE_PRECISION (type)
6066 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
6067 return 0;
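/* A widening conversion is safe here: a well-defined multiple of
   BOTTOM stays a multiple after widening, whereas a narrowing
   conversion could discard the bits that made it one.  */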
6068 /* Fall through. */
6069 case SAVE_EXPR:
6070 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
6071
6072 case INTEGER_CST:
6073 if ((TREE_CODE (bottom) != INTEGER_CST)
6074 || (tree_int_cst_sgn (top) < 0)
6075 || (tree_int_cst_sgn (bottom) < 0))
6076 return 0;
6077 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
6078 top, bottom, 0));
6079
6080 default:
6081 return 0;
6082 }
6083 }
6084 \f