1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 88, 92-99, 2000 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21 /*@@ This file should be rewritten to use an arbitrary precision
22 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
23 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
24 @@ The routines that translate from the ap rep should
25 @@ warn if precision et al. is lost.
26 @@ This would also make life easier when this technology is used
27 @@ for cross-compilers. */
28
29
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type.
32
33 fold takes a tree as argument and returns a simplified tree.
34
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
38
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
41
42 force_fit_type takes a constant and prior overflow indicator, and
43 forces the value to fit the type. It returns an overflow indicator. */
44
45 #include "config.h"
46 #include "system.h"
47 #include <setjmp.h>
48 #include "flags.h"
49 #include "tree.h"
50 #include "rtl.h"
51 #include "tm_p.h"
52 #include "toplev.h"
53 #include "ggc.h"
54
55 static void encode PARAMS ((HOST_WIDE_INT *,
56 HOST_WIDE_INT, HOST_WIDE_INT));
57 static void decode PARAMS ((HOST_WIDE_INT *,
58 HOST_WIDE_INT *, HOST_WIDE_INT *));
59 int div_and_round_double PARAMS ((enum tree_code, int, HOST_WIDE_INT,
60 HOST_WIDE_INT, HOST_WIDE_INT,
61 HOST_WIDE_INT, HOST_WIDE_INT *,
62 HOST_WIDE_INT *, HOST_WIDE_INT *,
63 HOST_WIDE_INT *));
64 static tree negate_expr PARAMS ((tree));
65 static tree split_tree PARAMS ((tree, enum tree_code, tree *, tree *,
66 int));
67 static tree associate_trees PARAMS ((tree, tree, enum tree_code, tree));
68 static tree int_const_binop PARAMS ((enum tree_code, tree, tree, int, int));
69 static void const_binop_1 PARAMS ((PTR));
70 static tree const_binop PARAMS ((enum tree_code, tree, tree, int));
71 static void fold_convert_1 PARAMS ((PTR));
72 static tree fold_convert PARAMS ((tree, tree));
73 static enum tree_code invert_tree_comparison PARAMS ((enum tree_code));
74 static enum tree_code swap_tree_comparison PARAMS ((enum tree_code));
75 static int truth_value_p PARAMS ((enum tree_code));
76 static int operand_equal_for_comparison_p PARAMS ((tree, tree, tree));
77 static int twoval_comparison_p PARAMS ((tree, tree *, tree *, int *));
78 static tree eval_subst PARAMS ((tree, tree, tree, tree, tree));
79 static tree omit_one_operand PARAMS ((tree, tree, tree));
80 static tree pedantic_omit_one_operand PARAMS ((tree, tree, tree));
81 static tree distribute_bit_expr PARAMS ((enum tree_code, tree, tree, tree));
82 static tree make_bit_field_ref PARAMS ((tree, tree, int, int, int));
83 static tree optimize_bit_field_compare PARAMS ((enum tree_code, tree,
84 tree, tree));
85 static tree decode_field_reference PARAMS ((tree, int *, int *,
86 enum machine_mode *, int *,
87 int *, tree *, tree *));
88 static int all_ones_mask_p PARAMS ((tree, int));
89 static int simple_operand_p PARAMS ((tree));
90 static tree range_binop PARAMS ((enum tree_code, tree, tree, int,
91 tree, int));
92 static tree make_range PARAMS ((tree, int *, tree *, tree *));
93 static tree build_range_check PARAMS ((tree, tree, int, tree, tree));
94 static int merge_ranges PARAMS ((int *, tree *, tree *, int, tree, tree,
95 int, tree, tree));
96 static tree fold_range_test PARAMS ((tree));
97 static tree unextend PARAMS ((tree, int, int, tree));
98 static tree fold_truthop PARAMS ((enum tree_code, tree, tree, tree));
99 static tree optimize_minmax_comparison PARAMS ((tree));
100 static tree extract_muldiv PARAMS ((tree, tree, enum tree_code, tree));
101 static tree strip_compound_expr PARAMS ((tree, tree));
102 static int multiple_of_p PARAMS ((tree, tree, tree));
103 static tree constant_boolean_node PARAMS ((int, tree));
104 static int count_cond PARAMS ((tree, int));
105
106 #ifndef BRANCH_COST
107 #define BRANCH_COST 1
108 #endif
109
110 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
111 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
112 and SUM1. Then this yields nonzero if overflow occurred during the
113 addition.
114
115 Overflow occurs if A and B have the same sign, but A and SUM differ in
116 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
117 sign. */
118 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
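/* Worked example (illustrative, using 8-bit two's complement for brevity):
   a = 0x7F, b = 0x01, sum = 0x80.  Then ~(a ^ b) = 0x81 (sign bit set,
   since a and b agree in sign) and a ^ sum = 0xFF (a and sum differ in
   sign), so the AND is 0x81, which is negative: overflow is reported.  */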
119 \f
120 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
121 We do that by representing the two-word integer in 4 words, with only
122 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
123 number. The value of the word is LOWPART + HIGHPART * BASE. */
124
125 #define LOWPART(x) \
126 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
127 #define HIGHPART(x) \
128 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
129 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
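/* For illustration, assuming HOST_BITS_PER_WIDE_INT == 32:
   BASE == 0x10000, and for x == 0x12345678 we get LOWPART (x) == 0x5678
   and HIGHPART (x) == 0x1234, so x == LOWPART (x) + HIGHPART (x) * BASE.  */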
130
131 /* Unpack a two-word integer into 4 words.
132 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
133 WORDS points to the array of HOST_WIDE_INTs. */
134
135 static void
136 encode (words, low, hi)
137 HOST_WIDE_INT *words;
138 HOST_WIDE_INT low, hi;
139 {
140 words[0] = LOWPART (low);
141 words[1] = HIGHPART (low);
142 words[2] = LOWPART (hi);
143 words[3] = HIGHPART (hi);
144 }
145
146 /* Pack an array of 4 words into a two-word integer.
147 WORDS points to the array of words.
148 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
149
150 static void
151 decode (words, low, hi)
152 HOST_WIDE_INT *words;
153 HOST_WIDE_INT *low, *hi;
154 {
155 *low = words[0] + words[1] * BASE;
156 *hi = words[2] + words[3] * BASE;
157 }
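/* Illustrative round trip, again assuming HOST_BITS_PER_WIDE_INT == 32:
   encode (words, 0x89ABCDEF, 0x01234567) stores {0xCDEF, 0x89AB, 0x4567,
   0x0123}; decode on the same array recovers low == 0x89ABCDEF and
   hi == 0x01234567.  */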
158 \f
159 /* Make the integer constant T valid for its type by setting to 0 or 1 all
160 the bits in the constant that don't belong in the type.
161
162 Return 1 if a signed overflow occurs, 0 otherwise. If OVERFLOW is
163 nonzero, a signed overflow has already occurred in calculating T, so
164 propagate it.
165
166 Make the real constant T valid for its type by calling CHECK_FLOAT_VALUE,
167 if it exists. */
168
169 int
170 force_fit_type (t, overflow)
171 tree t;
172 int overflow;
173 {
174 HOST_WIDE_INT low, high;
175 register int prec;
176
177 if (TREE_CODE (t) == REAL_CST)
178 {
179 #ifdef CHECK_FLOAT_VALUE
180 CHECK_FLOAT_VALUE (TYPE_MODE (TREE_TYPE (t)), TREE_REAL_CST (t),
181 overflow);
182 #endif
183 return overflow;
184 }
185
186 else if (TREE_CODE (t) != INTEGER_CST)
187 return overflow;
188
189 low = TREE_INT_CST_LOW (t);
190 high = TREE_INT_CST_HIGH (t);
191
192 if (POINTER_TYPE_P (TREE_TYPE (t)))
193 prec = POINTER_SIZE;
194 else
195 prec = TYPE_PRECISION (TREE_TYPE (t));
196
197 /* First clear all bits that are beyond the type's precision. */
198
199 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
200 ;
201 else if (prec > HOST_BITS_PER_WIDE_INT)
202 TREE_INT_CST_HIGH (t)
203 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
204 else
205 {
206 TREE_INT_CST_HIGH (t) = 0;
207 if (prec < HOST_BITS_PER_WIDE_INT)
208 TREE_INT_CST_LOW (t) &= ~((HOST_WIDE_INT) (-1) << prec);
209 }
210
211 /* Unsigned types do not suffer sign extension or overflow. */
212 if (TREE_UNSIGNED (TREE_TYPE (t)))
213 return overflow;
214
215 /* If the value's sign bit is set, extend the sign. */
216 if (prec != 2 * HOST_BITS_PER_WIDE_INT
217 && (prec > HOST_BITS_PER_WIDE_INT
218 ? (TREE_INT_CST_HIGH (t)
219 & ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
220 : TREE_INT_CST_LOW (t) & ((HOST_WIDE_INT) 1 << (prec - 1))))
221 {
222 /* Value is negative:
223 set to 1 all the bits that are outside this type's precision. */
224 if (prec > HOST_BITS_PER_WIDE_INT)
225 TREE_INT_CST_HIGH (t)
226 |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
227 else
228 {
229 TREE_INT_CST_HIGH (t) = -1;
230 if (prec < HOST_BITS_PER_WIDE_INT)
231 TREE_INT_CST_LOW (t) |= ((HOST_WIDE_INT) (-1) << prec);
232 }
233 }
234
235 /* Return nonzero if signed overflow occurred. */
236 return
237 ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
238 != 0);
239 }
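/* Example of the above: fitting the value 255 into an 8-bit signed type
   first masks the low part to 0xFF, then sees the sign bit (0x80) set and
   sign-extends, leaving the value -1.  Since the bits changed relative to
   the incoming LOW/HIGH, signed overflow is reported.  */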
240 \f
241 /* Add two doubleword integers with doubleword result.
242 Each argument is given as two `HOST_WIDE_INT' pieces.
243 One argument is L1 and H1; the other, L2 and H2.
244 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
245
246 int
247 add_double (l1, h1, l2, h2, lv, hv)
248 HOST_WIDE_INT l1, h1, l2, h2;
249 HOST_WIDE_INT *lv, *hv;
250 {
251 HOST_WIDE_INT l, h;
252
253 l = l1 + l2;
254 h = h1 + h2 + ((unsigned HOST_WIDE_INT) l < l1);
255
256 *lv = l;
257 *hv = h;
258 return OVERFLOW_SUM_SIGN (h1, h2, h);
259 }
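/* The carry out of the low word is detected by unsigned wraparound:
   e.g. with 32-bit words, l1 = 0xFFFFFFFF and l2 = 1 give l = 0, and
   (unsigned HOST_WIDE_INT) l < l1 yields the carry of 1 into the sum
   of the high words.  */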
260
261 /* Negate a doubleword integer with doubleword result.
262 Return nonzero if the operation overflows, assuming it's signed.
263 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
264 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
265
266 int
267 neg_double (l1, h1, lv, hv)
268 HOST_WIDE_INT l1, h1;
269 HOST_WIDE_INT *lv, *hv;
270 {
271 if (l1 == 0)
272 {
273 *lv = 0;
274 *hv = - h1;
275 return (*hv & h1) < 0;
276 }
277 else
278 {
279 *lv = - l1;
280 *hv = ~ h1;
281 return 0;
282 }
283 }
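/* This implements -X == ~X + 1 on the double word: when L1 == 0 the +1
   carries all the way into the high word (so *HV = -H1), and negation
   overflows exactly when H1 is the most negative value, i.e. when -H1
   and H1 are both negative; otherwise the low word absorbs the +1 and
   the high word is just complemented.  */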
284 \f
285 /* Multiply two doubleword integers with doubleword result.
286 Return nonzero if the operation overflows, assuming it's signed.
287 Each argument is given as two `HOST_WIDE_INT' pieces.
288 One argument is L1 and H1; the other, L2 and H2.
289 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
290
291 int
292 mul_double (l1, h1, l2, h2, lv, hv)
293 HOST_WIDE_INT l1, h1, l2, h2;
294 HOST_WIDE_INT *lv, *hv;
295 {
296 HOST_WIDE_INT arg1[4];
297 HOST_WIDE_INT arg2[4];
298 HOST_WIDE_INT prod[4 * 2];
299 register unsigned HOST_WIDE_INT carry;
300 register int i, j, k;
301 HOST_WIDE_INT toplow, tophigh, neglow, neghigh;
302
303 encode (arg1, l1, h1);
304 encode (arg2, l2, h2);
305
306 bzero ((char *) prod, sizeof prod);
307
308 for (i = 0; i < 4; i++)
309 {
310 carry = 0;
311 for (j = 0; j < 4; j++)
312 {
313 k = i + j;
314 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
315 carry += arg1[i] * arg2[j];
316 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
317 carry += prod[k];
318 prod[k] = LOWPART (carry);
319 carry = HIGHPART (carry);
320 }
321 prod[i + 4] = carry;
322 }
323
324 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
325
326 /* Check for overflow by calculating the top half of the answer in full;
327 it should agree with the low half's sign bit. */
328 decode (prod+4, &toplow, &tophigh);
329 if (h1 < 0)
330 {
331 neg_double (l2, h2, &neglow, &neghigh);
332 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
333 }
334 if (h2 < 0)
335 {
336 neg_double (l1, h1, &neglow, &neghigh);
337 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
338 }
339 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
340 }
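/* The loop above is classic grade-school multiplication in base BASE:
   PROD accumulates all the partial half-word products, so it holds the
   full 4-word result.  After correcting for negative operands, the top
   half is compared against the sign of the low half: for a
   non-overflowing signed product it must be all zero bits (result >= 0)
   or all one bits (result < 0).  */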
341 \f
342 /* Shift the doubleword integer in L1, H1 left by COUNT places
343 keeping only PREC bits of result.
344 Shift right if COUNT is negative.
345 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
346 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
347
348 void
349 lshift_double (l1, h1, count, prec, lv, hv, arith)
350 HOST_WIDE_INT l1, h1, count;
351 int prec;
352 HOST_WIDE_INT *lv, *hv;
353 int arith;
354 {
355 if (count < 0)
356 {
357 rshift_double (l1, h1, - count, prec, lv, hv, arith);
358 return;
359 }
360
361 #ifdef SHIFT_COUNT_TRUNCATED
362 if (SHIFT_COUNT_TRUNCATED)
363 count %= prec;
364 #endif
365
366 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
367 {
368 /* Shifting by the host word size is undefined according to the
369 ANSI standard, so we must handle this as a special case. */
370 *hv = 0;
371 *lv = 0;
372 }
373 else if (count >= HOST_BITS_PER_WIDE_INT)
374 {
375 *hv = (unsigned HOST_WIDE_INT) l1 << (count - HOST_BITS_PER_WIDE_INT);
376 *lv = 0;
377 }
378 else
379 {
380 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
381 | ((unsigned HOST_WIDE_INT) l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
382 *lv = (unsigned HOST_WIDE_INT) l1 << count;
383 }
384 }
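/* Note the split shift `>> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1'
   above: when COUNT == 0 a single shift by HOST_BITS_PER_WIDE_INT would
   be undefined behavior in C, so the shift is done in two halves that
   are each strictly less than the word width.  rshift_double below uses
   the same idiom.  */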
385
386 /* Shift the doubleword integer in L1, H1 right by COUNT places
387 keeping only PREC bits of result. COUNT must be positive.
388 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
389 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
390
391 void
392 rshift_double (l1, h1, count, prec, lv, hv, arith)
393 HOST_WIDE_INT l1, h1, count;
394 int prec ATTRIBUTE_UNUSED;
395 HOST_WIDE_INT *lv, *hv;
396 int arith;
397 {
398 unsigned HOST_WIDE_INT signmask;
399 signmask = (arith
400 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
401 : 0);
402
403 #ifdef SHIFT_COUNT_TRUNCATED
404 if (SHIFT_COUNT_TRUNCATED)
405 count %= prec;
406 #endif
407
408 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
409 {
410 /* Shifting by the host word size is undefined according to the
411 ANSI standard, so we must handle this as a special case. */
412 *hv = signmask;
413 *lv = signmask;
414 }
415 else if (count >= HOST_BITS_PER_WIDE_INT)
416 {
417 *hv = signmask;
418 *lv = ((signmask << (2 * HOST_BITS_PER_WIDE_INT - count - 1) << 1)
419 | ((unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT)));
420 }
421 else
422 {
423 *lv = (((unsigned HOST_WIDE_INT) l1 >> count)
424 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
425 *hv = ((signmask << (HOST_BITS_PER_WIDE_INT - count))
426 | ((unsigned HOST_WIDE_INT) h1 >> count));
427 }
428 }
429 \f
430 /* Rotate the doubleword integer in L1, H1 left by COUNT places
431 keeping only PREC bits of result.
432 Rotate right if COUNT is negative.
433 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
434
435 void
436 lrotate_double (l1, h1, count, prec, lv, hv)
437 HOST_WIDE_INT l1, h1, count;
438 int prec;
439 HOST_WIDE_INT *lv, *hv;
440 {
441 HOST_WIDE_INT s1l, s1h, s2l, s2h;
442
443 count %= prec;
444 if (count < 0)
445 count += prec;
446
447 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
448 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
449 *lv = s1l | s2l;
450 *hv = s1h | s2h;
451 }
452
453 /* Rotate the doubleword integer in L1, H1 right by COUNT places
454 keeping only PREC bits of result. COUNT must be positive.
455 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
456
457 void
458 rrotate_double (l1, h1, count, prec, lv, hv)
459 HOST_WIDE_INT l1, h1, count;
460 int prec;
461 HOST_WIDE_INT *lv, *hv;
462 {
463 HOST_WIDE_INT s1l, s1h, s2l, s2h;
464
465 count %= prec;
466 if (count < 0)
467 count += prec;
468
469 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
470 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
471 *lv = s1l | s2l;
472 *hv = s1h | s2h;
473 }
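/* Both rotates rely on the identity
   rotate (x, count) == (x shifted by count) | (x shifted the other way
   by prec - count), with COUNT first reduced into [0, prec) so the two
   logical shifts stay within the doubleword width.  */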
474 \f
475 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
476 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
477 CODE is a tree code for a kind of division, one of
478 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
479 or EXACT_DIV_EXPR.
480 It controls how the quotient is rounded to an integer.
481 Return nonzero if the operation overflows.
482 UNS nonzero says do unsigned division. */
483
484 int
485 div_and_round_double (code, uns,
486 lnum_orig, hnum_orig, lden_orig, hden_orig,
487 lquo, hquo, lrem, hrem)
488 enum tree_code code;
489 int uns;
490 HOST_WIDE_INT lnum_orig, hnum_orig; /* num == numerator == dividend */
491 HOST_WIDE_INT lden_orig, hden_orig; /* den == denominator == divisor */
492 HOST_WIDE_INT *lquo, *hquo, *lrem, *hrem;
493 {
494 int quo_neg = 0;
495 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
496 HOST_WIDE_INT den[4], quo[4];
497 register int i, j;
498 unsigned HOST_WIDE_INT work;
499 register unsigned HOST_WIDE_INT carry = 0;
500 HOST_WIDE_INT lnum = lnum_orig;
501 HOST_WIDE_INT hnum = hnum_orig;
502 HOST_WIDE_INT lden = lden_orig;
503 HOST_WIDE_INT hden = hden_orig;
504 int overflow = 0;
505
506 if ((hden == 0) && (lden == 0))
507 overflow = 1, lden = 1;
508
509 /* calculate quotient sign and convert operands to unsigned. */
510 if (!uns)
511 {
512 if (hnum < 0)
513 {
514 quo_neg = ~ quo_neg;
515 /* (minimum integer) / (-1) is the only overflow case. */
516 if (neg_double (lnum, hnum, &lnum, &hnum) && (lden & hden) == -1)
517 overflow = 1;
518 }
519 if (hden < 0)
520 {
521 quo_neg = ~ quo_neg;
522 neg_double (lden, hden, &lden, &hden);
523 }
524 }
525
526 if (hnum == 0 && hden == 0)
527 { /* single precision */
528 *hquo = *hrem = 0;
529 /* This unsigned division rounds toward zero. */
530 *lquo = lnum / (unsigned HOST_WIDE_INT) lden;
531 goto finish_up;
532 }
533
534 if (hnum == 0)
535 { /* trivial case: dividend < divisor */
536 /* hden != 0 already checked. */
537 *hquo = *lquo = 0;
538 *hrem = hnum;
539 *lrem = lnum;
540 goto finish_up;
541 }
542
543 bzero ((char *) quo, sizeof quo);
544
545 bzero ((char *) num, sizeof num); /* to zero the extra (5th) element */
546 bzero ((char *) den, sizeof den);
547
548 encode (num, lnum, hnum);
549 encode (den, lden, hden);
550
551 /* Special code for when the divisor < BASE. */
552 if (hden == 0 && lden < (HOST_WIDE_INT) BASE)
553 {
554 /* hnum != 0 already checked. */
555 for (i = 4 - 1; i >= 0; i--)
556 {
557 work = num[i] + carry * BASE;
558 quo[i] = work / (unsigned HOST_WIDE_INT) lden;
559 carry = work % (unsigned HOST_WIDE_INT) lden;
560 }
561 }
562 else
563 {
564 /* Full double precision division,
565 with thanks to Don Knuth's "Seminumerical Algorithms". */
566 int num_hi_sig, den_hi_sig;
567 unsigned HOST_WIDE_INT quo_est, scale;
568
569 /* Find the highest non-zero divisor digit. */
570 for (i = 4 - 1; ; i--)
571 if (den[i] != 0) {
572 den_hi_sig = i;
573 break;
574 }
575
576 /* Ensure that the first digit of the divisor is at least BASE/2.
577 This is required by the quotient digit estimation algorithm. */
578
579 scale = BASE / (den[den_hi_sig] + 1);
580 if (scale > 1) { /* scale divisor and dividend */
581 carry = 0;
582 for (i = 0; i <= 4 - 1; i++) {
583 work = (num[i] * scale) + carry;
584 num[i] = LOWPART (work);
585 carry = HIGHPART (work);
586 } num[4] = carry;
587 carry = 0;
588 for (i = 0; i <= 4 - 1; i++) {
589 work = (den[i] * scale) + carry;
590 den[i] = LOWPART (work);
591 carry = HIGHPART (work);
592 if (den[i] != 0) den_hi_sig = i;
593 }
594 }
595
596 num_hi_sig = 4;
597
598 /* Main loop */
599 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--) {
600 /* guess the next quotient digit, quo_est, by dividing the first
601 two remaining dividend digits by the high order divisor digit.
602 quo_est is never low and is at most 2 high. */
603 unsigned HOST_WIDE_INT tmp;
604
605 num_hi_sig = i + den_hi_sig + 1;
606 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
607 if (num[num_hi_sig] != den[den_hi_sig])
608 quo_est = work / den[den_hi_sig];
609 else
610 quo_est = BASE - 1;
611
612 /* refine quo_est so it's usually correct, and at most one high. */
613 tmp = work - quo_est * den[den_hi_sig];
614 if (tmp < BASE
615 && den[den_hi_sig - 1] * quo_est > (tmp * BASE + num[num_hi_sig - 2]))
616 quo_est--;
617
618 /* Try QUO_EST as the quotient digit, by multiplying the
619 divisor by QUO_EST and subtracting from the remaining dividend.
620 Keep in mind that QUO_EST is the I - 1st digit. */
621
622 carry = 0;
623 for (j = 0; j <= den_hi_sig; j++)
624 {
625 work = quo_est * den[j] + carry;
626 carry = HIGHPART (work);
627 work = num[i + j] - LOWPART (work);
628 num[i + j] = LOWPART (work);
629 carry += HIGHPART (work) != 0;
630 }
631
632 /* if quo_est was high by one, then num[i] went negative and
633 we need to correct things. */
634
635 if (num[num_hi_sig] < carry)
636 {
637 quo_est--;
638 carry = 0; /* add divisor back in */
639 for (j = 0; j <= den_hi_sig; j++)
640 {
641 work = num[i + j] + den[j] + carry;
642 carry = HIGHPART (work);
643 num[i + j] = LOWPART (work);
644 }
645 num [num_hi_sig] += carry;
646 }
647
648 /* store the quotient digit. */
649 quo[i] = quo_est;
650 }
651 }
652
653 decode (quo, lquo, hquo);
654
655 finish_up:
656 /* if result is negative, make it so. */
657 if (quo_neg)
658 neg_double (*lquo, *hquo, lquo, hquo);
659
660 /* compute trial remainder: rem = num - (quo * den) */
661 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
662 neg_double (*lrem, *hrem, lrem, hrem);
663 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
664
665 switch (code)
666 {
667 case TRUNC_DIV_EXPR:
668 case TRUNC_MOD_EXPR: /* round toward zero */
669 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
670 return overflow;
671
672 case FLOOR_DIV_EXPR:
673 case FLOOR_MOD_EXPR: /* round toward negative infinity */
674 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
675 {
676 /* quo = quo - 1; */
677 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
678 lquo, hquo);
679 }
680 else return overflow;
681 break;
682
683 case CEIL_DIV_EXPR:
684 case CEIL_MOD_EXPR: /* round toward positive infinity */
685 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
686 {
687 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
688 lquo, hquo);
689 }
690 else return overflow;
691 break;
692
693 case ROUND_DIV_EXPR:
694 case ROUND_MOD_EXPR: /* round to closest integer */
695 {
696 HOST_WIDE_INT labs_rem = *lrem, habs_rem = *hrem;
697 HOST_WIDE_INT labs_den = lden, habs_den = hden, ltwice, htwice;
698
699 /* get absolute values */
700 if (*hrem < 0) neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
701 if (hden < 0) neg_double (lden, hden, &labs_den, &habs_den);
702
703 /* if (2 * abs (lrem) >= abs (lden)) */
704 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
705 labs_rem, habs_rem, &ltwice, &htwice);
706 if (((unsigned HOST_WIDE_INT) habs_den
707 < (unsigned HOST_WIDE_INT) htwice)
708 || (((unsigned HOST_WIDE_INT) habs_den
709 == (unsigned HOST_WIDE_INT) htwice)
710 && ((unsigned HOST_WIDE_INT) labs_den
711 < (unsigned HOST_WIDE_INT) ltwice)))
712 {
713 if (*hquo < 0)
714 /* quo = quo - 1; */
715 add_double (*lquo, *hquo,
716 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
717 else
718 /* quo = quo + 1; */
719 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
720 lquo, hquo);
721 }
722 else return overflow;
723 }
724 break;
725
726 default:
727 abort ();
728 }
729
730 /* compute true remainder: rem = num - (quo * den) */
731 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
732 neg_double (*lrem, *hrem, lrem, hrem);
733 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
734 return overflow;
735 }
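/* Worked example of the rounding adjustments: for -7 / 2 the truncating
   quotient is -3 with remainder -1.  FLOOR_DIV_EXPR subtracts 1 to give
   -4 (remainder 1); CEIL_DIV_EXPR leaves -3 (the quotient is negative);
   ROUND_DIV_EXPR sees 2 * |rem| >= |den| and moves away from zero to -4.
   The true remainder is recomputed afterwards in every case.  */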
736 \f
737 #ifndef REAL_ARITHMETIC
738 /* Effectively truncate a real value to represent the nearest possible value
739 in a narrower mode. The result is actually represented in the same data
740 type as the argument, but its value is usually different.
741
742 A trap may occur during the FP operations and it is the responsibility
743 of the calling function to have a handler established. */
744
745 REAL_VALUE_TYPE
746 real_value_truncate (mode, arg)
747 enum machine_mode mode;
748 REAL_VALUE_TYPE arg;
749 {
750 return REAL_VALUE_TRUNCATE (mode, arg);
751 }
752
753 #if TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
754
755 /* Check for infinity in an IEEE double precision number. */
756
757 int
758 target_isinf (x)
759 REAL_VALUE_TYPE x;
760 {
761 /* The IEEE 64-bit double format. */
762 union {
763 REAL_VALUE_TYPE d;
764 struct {
765 unsigned sign : 1;
766 unsigned exponent : 11;
767 unsigned mantissa1 : 20;
768 unsigned mantissa2;
769 } little_endian;
770 struct {
771 unsigned mantissa2;
772 unsigned mantissa1 : 20;
773 unsigned exponent : 11;
774 unsigned sign : 1;
775 } big_endian;
776 } u;
777
778 u.d = dconstm1;
779 if (u.big_endian.sign == 1)
780 {
781 u.d = x;
782 return (u.big_endian.exponent == 2047
783 && u.big_endian.mantissa1 == 0
784 && u.big_endian.mantissa2 == 0);
785 }
786 else
787 {
788 u.d = x;
789 return (u.little_endian.exponent == 2047
790 && u.little_endian.mantissa1 == 0
791 && u.little_endian.mantissa2 == 0);
792 }
793 }
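/* Note on the dconstm1 probe used in target_isinf above and in
   target_isnan and target_negative below: storing -1.0 and testing the
   big-endian view's sign field determines at run time which of the two
   struct layouts matches the host, since only the matching layout sees
   the sign bit where it expects it.  */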
794
795 /* Check whether an IEEE double precision number is a NaN. */
796
797 int
798 target_isnan (x)
799 REAL_VALUE_TYPE x;
800 {
801 /* The IEEE 64-bit double format. */
802 union {
803 REAL_VALUE_TYPE d;
804 struct {
805 unsigned sign : 1;
806 unsigned exponent : 11;
807 unsigned mantissa1 : 20;
808 unsigned mantissa2;
809 } little_endian;
810 struct {
811 unsigned mantissa2;
812 unsigned mantissa1 : 20;
813 unsigned exponent : 11;
814 unsigned sign : 1;
815 } big_endian;
816 } u;
817
818 u.d = dconstm1;
819 if (u.big_endian.sign == 1)
820 {
821 u.d = x;
822 return (u.big_endian.exponent == 2047
823 && (u.big_endian.mantissa1 != 0
824 || u.big_endian.mantissa2 != 0));
825 }
826 else
827 {
828 u.d = x;
829 return (u.little_endian.exponent == 2047
830 && (u.little_endian.mantissa1 != 0
831 || u.little_endian.mantissa2 != 0));
832 }
833 }
834
835 /* Check for a negative IEEE double precision number. */
836
837 int
838 target_negative (x)
839 REAL_VALUE_TYPE x;
840 {
841 /* The IEEE 64-bit double format. */
842 union {
843 REAL_VALUE_TYPE d;
844 struct {
845 unsigned sign : 1;
846 unsigned exponent : 11;
847 unsigned mantissa1 : 20;
848 unsigned mantissa2;
849 } little_endian;
850 struct {
851 unsigned mantissa2;
852 unsigned mantissa1 : 20;
853 unsigned exponent : 11;
854 unsigned sign : 1;
855 } big_endian;
856 } u;
857
858 u.d = dconstm1;
859 if (u.big_endian.sign == 1)
860 {
861 u.d = x;
862 return u.big_endian.sign;
863 }
864 else
865 {
866 u.d = x;
867 return u.little_endian.sign;
868 }
869 }
870 #else /* Target not IEEE */
871
872 /* Let's assume other float formats don't have infinity.
873 (This can be overridden by redefining REAL_VALUE_ISINF.) */
874
875 int
876 target_isinf (x)
877 REAL_VALUE_TYPE x;
878 {
879 return 0;
880 }
881
882 /* Let's assume other float formats don't have NaNs.
883 (This can be overridden by redefining REAL_VALUE_ISNAN.) */
884
885 int
886 target_isnan (x)
887 REAL_VALUE_TYPE x;
888 {
889 return 0;
890 }
891
892 /* Let's assume other float formats don't have minus zero.
893 (This can be overridden by redefining REAL_VALUE_NEGATIVE.) */
894
895 int
896 target_negative (x)
897 REAL_VALUE_TYPE x;
898 {
899 return x < 0;
900 }
901 #endif /* Target not IEEE */
902
903 /* Try to change R into its exact multiplicative inverse in machine mode
904 MODE. Return nonzero function value if successful. */
905
906 int
907 exact_real_inverse (mode, r)
908 enum machine_mode mode;
909 REAL_VALUE_TYPE *r;
910 {
911 jmp_buf float_error;
912 union
913 {
914 double d;
915 unsigned short i[4];
916 } x, t, y;
917 #ifdef CHECK_FLOAT_VALUE
918 int i;
919 #endif
920
921 /* Usually disable if bounds checks are not reliable. */
922 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT) && !flag_pretend_float)
923 return 0;
924
925 /* Set array index to the less significant bits in the unions, depending
926 on the endian-ness of the host doubles.
927 Disable if insufficient information on the data structure. */
928 #if HOST_FLOAT_FORMAT == UNKNOWN_FLOAT_FORMAT
929 return 0;
930 #else
931 #if HOST_FLOAT_FORMAT == VAX_FLOAT_FORMAT
932 #define K 2
933 #else
934 #if HOST_FLOAT_FORMAT == IBM_FLOAT_FORMAT
935 #define K 2
936 #else
937 #define K (2 * HOST_FLOAT_WORDS_BIG_ENDIAN)
938 #endif
939 #endif
940 #endif
941
942 if (setjmp (float_error))
943 {
944 /* Don't do the optimization if there was an arithmetic error. */
945 fail:
946 set_float_handler (NULL_PTR);
947 return 0;
948 }
949 set_float_handler (float_error);
950
951 /* Domain check the argument. */
952 x.d = *r;
953 if (x.d == 0.0)
954 goto fail;
955
956 #ifdef REAL_INFINITY
957 if (REAL_VALUE_ISINF (x.d) || REAL_VALUE_ISNAN (x.d))
958 goto fail;
959 #endif
960
961 /* Compute the reciprocal and check for numerical exactness.
962 It is unnecessary to check all the significand bits to determine
963 whether X is a power of 2. If X is not, then it is impossible for
964 the bottom half significand of both X and 1/X to be all zero bits.
965 Hence we ignore the data structure of the top half and examine only
966 the low order bits of the two significands. */
967 t.d = 1.0 / x.d;
968 if (x.i[K] != 0 || x.i[K + 1] != 0 || t.i[K] != 0 || t.i[K + 1] != 0)
969 goto fail;
970
971 /* Truncate to the required mode and range-check the result. */
972 y.d = REAL_VALUE_TRUNCATE (mode, t.d);
973 #ifdef CHECK_FLOAT_VALUE
974 i = 0;
975 if (CHECK_FLOAT_VALUE (mode, y.d, i))
976 goto fail;
977 #endif
978
979 /* Fail if truncation changed the value. */
980 if (y.d != t.d || y.d == 0.0)
981 goto fail;
982
983 #ifdef REAL_INFINITY
984 if (REAL_VALUE_ISINF (y.d) || REAL_VALUE_ISNAN (y.d))
985 goto fail;
986 #endif
987
988 /* Output the reciprocal and return success flag. */
989 set_float_handler (NULL_PTR);
990 *r = y.d;
991 return 1;
992 }
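/* Sketch of why the low-significand test suffices: 1.0 / X is exactly
   representable only when X is a power of 2, and in that case both X and
   1/X have all-zero low significand halves.  So e.g. the inverse of 4.0
   is found exactly (0.25), while 3.0 fails the test and the caller keeps
   the division.  */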
993
994 /* Convert C9X hexadecimal floating point string constant S. Return
995 real value type in mode MODE. This function uses the host computer's
996 floating point arithmetic when there is no REAL_ARITHMETIC. */
997
998 REAL_VALUE_TYPE
999 real_hex_to_f (s, mode)
1000 char *s;
1001 enum machine_mode mode;
1002 {
1003 REAL_VALUE_TYPE ip;
1004 char *p = s;
1005 unsigned HOST_WIDE_INT low, high;
1006 int shcount, nrmcount, k;
1007 int sign, expsign, isfloat;
1008 int lost = 0; /* Nonzero low order bits shifted out and discarded. */
1009 int frexpon = 0; /* Bits after the decimal point. */
1010 int expon = 0; /* Value of exponent. */
1011 int decpt = 0; /* How many decimal points. */
1012 int gotp = 0; /* How many P's. */
1013 char c;
1014
1015 isfloat = 0;
1016 expsign = 1;
1017 ip = 0.0;
1018
1019 while (*p == ' ' || *p == '\t')
1020 ++p;
1021
1022 /* Sign, if any, comes first. */
1023 sign = 1;
1024 if (*p == '-')
1025 {
1026 sign = -1;
1027 ++p;
1028 }
1029
1030 /* The string is supposed to start with 0x or 0X. */
1031 if (*p == '0')
1032 {
1033 ++p;
1034 if (*p == 'x' || *p == 'X')
1035 ++p;
1036 else
1037 abort ();
1038 }
1039 else
1040 abort ();
1041
1042 while (*p == '0')
1043 ++p;
1044
1045 high = 0;
1046 low = 0;
1047 shcount = 0;
1048 while ((c = *p) != '\0')
1049 {
1050 if ((c >= '0' && c <= '9') || (c >= 'A' && c <= 'F')
1051 || (c >= 'a' && c <= 'f'))
1052 {
1053 k = c & 0x7f;
1054 if (k >= 'a')
1055 k = k - 'a' + 10;
1056 else if (k >= 'A')
1057 k = k - 'A' + 10;
1058 else
1059 k = k - '0';
1060
1061 if ((high & 0xf0000000) == 0)
1062 {
1063 high = (high << 4) + ((low >> 28) & 15);
1064 low = (low << 4) + k;
1065 shcount += 4;
1066 if (decpt)
1067 frexpon += 4;
1068 }
1069 else
1070 {
1071 /* Record nonzero lost bits. */
1072 lost |= k;
1073 if (! decpt)
1074 frexpon -= 4;
1075 }
1076 ++p;
1077 }
1078 else if ( c == '.')
1079 {
1080 ++decpt;
1081 ++p;
1082 }
1083
1084 else if (c == 'p' || c == 'P')
1085 {
1086 ++gotp;
1087 ++p;
1088 /* Sign of exponent. */
1089 if (*p == '-')
1090 {
1091 expsign = -1;
1092 ++p;
1093 }
1094
1095 /* Value of exponent.
1096 The exponent field is a decimal integer. */
1097 while (ISDIGIT(*p))
1098 {
1099 k = (*p++ & 0x7f) - '0';
1100 expon = 10 * expon + k;
1101 }
1102
1103 expon *= expsign;
1104 /* F suffix is ambiguous in the significand part
1105 so it must appear after the decimal exponent field. */
1106 if (*p == 'f' || *p == 'F')
1107 {
1108 isfloat = 1;
1109 ++p;
1110 break;
1111 }
1112 }
1113
1114 else if (c == 'l' || c == 'L')
1115 {
1116 ++p;
1117 break;
1118 }
1119 else
1120 break;
1121 }
1122
1123 /* Abort if last character read was not legitimate. */
1124 c = *p;
1125 if ((c != '\0' && c != ' ' && c != '\n' && c != '\r') || (decpt > 1))
1126 abort ();
1127
1128 /* There must be either one decimal point or one p. */
1129 if (decpt == 0 && gotp == 0)
1130 abort ();
1131
1132 shcount -= 4;
1133 if (high == 0 && low == 0)
1134 return dconst0;
1135
1136 /* Normalize. */
1137 nrmcount = 0;
1138 if (high == 0)
1139 {
1140 high = low;
1141 low = 0;
1142 nrmcount += 32;
1143 }
1144
1145 /* Leave a high guard bit for carry-out. */
1146 if ((high & 0x80000000) != 0)
1147 {
1148 lost |= low & 1;
1149 low = (low >> 1) | (high << 31);
1150 high = high >> 1;
1151 nrmcount -= 1;
1152 }
1153
1154 if ((high & 0xffff8000) == 0)
1155 {
1156 high = (high << 16) + ((low >> 16) & 0xffff);
1157 low = low << 16;
1158 nrmcount += 16;
1159 }
1160
1161 while ((high & 0xc0000000) == 0)
1162 {
1163 high = (high << 1) + ((low >> 31) & 1);
1164 low = low << 1;
1165 nrmcount += 1;
1166 }
1167
1168 if (isfloat || GET_MODE_SIZE(mode) == UNITS_PER_WORD)
1169 {
1170 /* Keep 24 bits precision, bits 0x7fffff80.
1171 Rounding bit is 0x40. */
1172 lost = lost | low | (high & 0x3f);
1173 low = 0;
1174 if (high & 0x40)
1175 {
1176 if ((high & 0x80) || lost)
1177 high += 0x40;
1178 }
1179 high &= 0xffffff80;
1180 }
1181 else
1182 {
1183 /* We need real.c to do long double formats, so here default
1184 to double precision. */
1185 #if HOST_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
1186 /* IEEE double.
1187 Keep 53 bits precision, bits 0x7fffffff fffffc00.
1188 Rounding bit is low word 0x200. */
1189 lost = lost | (low & 0x1ff);
1190 if (low & 0x200)
1191 {
1192 if ((low & 0x400) || lost)
1193 {
1194 low = (low + 0x200) & 0xfffffc00;
1195 if (low == 0)
1196 high += 1;
1197 }
1198 }
1199 low &= 0xfffffc00;
1200 #else
1201 /* Assume it's a VAX with 56-bit significand,
1202 bits 0x7fffffff ffffff80. */
1203 lost = lost | (low & 0x7f);
1204 if (low & 0x40)
1205 {
1206 if ((low & 0x80) || lost)
1207 {
1208 low = (low + 0x40) & 0xffffff80;
1209 if (low == 0)
1210 high += 1;
1211 }
1212 }
1213 low &= 0xffffff80;
1214 #endif
1215 }
1216
1217 ip = (double) high;
1218 ip = REAL_VALUE_LDEXP (ip, 32) + (double) low;
1219 /* Apply shifts and exponent value as power of 2. */
1220 ip = REAL_VALUE_LDEXP (ip, expon - (nrmcount + frexpon));
1221
1222 if (sign < 0)
1223 ip = -ip;
1224 return ip;
1225 }
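/* Worked example: for "0x1.8p1" the digit loop accumulates 0x18 with
   FREXPON == 4 (four fractional bits were seen after the point), and the
   final scaling computes 0x18 * 2^(EXPON - FREXPON) == 24 * 2^-3 == 3.0;
   NRMCOUNT cancels out of the exponent arithmetic.  */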
1226
1227 #endif /* no REAL_ARITHMETIC */
1228 \f
1229 /* Given T, an expression, return the negation of T. Allow for T to be
1230 null, in which case return null. */
1231
1232 static tree
1233 negate_expr (t)
1234 tree t;
1235 {
1236 tree type;
1237 tree tem;
1238
1239 if (t == 0)
1240 return 0;
1241
1242 type = TREE_TYPE (t);
1243 STRIP_SIGN_NOPS (t);
1244
1245 switch (TREE_CODE (t))
1246 {
1247 case INTEGER_CST:
1248 case REAL_CST:
1249 if (! TREE_UNSIGNED (type)
1250 && 0 != (tem = fold (build1 (NEGATE_EXPR, type, t)))
1251 && ! TREE_OVERFLOW (tem))
1252 return tem;
1253 break;
1254
1255 case NEGATE_EXPR:
1256 return convert (type, TREE_OPERAND (t, 0));
1257
1258 case MINUS_EXPR:
1259 /* - (A - B) -> B - A */
1260 if (! FLOAT_TYPE_P (type) || flag_fast_math)
1261 return convert (type,
1262 fold (build (MINUS_EXPR, TREE_TYPE (t),
1263 TREE_OPERAND (t, 1),
1264 TREE_OPERAND (t, 0))));
1265 break;
1266
1267 default:
1268 break;
1269 }
1270
1271 return convert (type, build1 (NEGATE_EXPR, TREE_TYPE (t), t));
1272 }
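/* For instance, negate_expr turns the integer tree (a - b) into (b - a)
   and folds the negation of a constant directly, falling back to an
   explicit NEGATE_EXPR node for anything else.  */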
1273 \f
1274 /* Split a tree IN into constant, literal and variable parts that could be
1275 combined with CODE to make IN. "constant" means an expression with
1276 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1277 commutative arithmetic operation. Store the constant part into *CONP,
1278 the literal in *LITP and return the variable part. If a part isn't
1279 present, set it to null. If the tree does not decompose in this way,
1280 return the entire tree as the variable part and the other parts as null.
1281
1282 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1283 case, we negate an operand that was subtracted. If NEGATE_P is true, we
1284 are negating all of IN.
1285
1286 If IN is itself a literal or constant, return it as appropriate.
1287
1288 Note that we do not guarantee that any of the three values will be the
1289 same type as IN, but they will have the same signedness and mode. */
1290
1291 static tree
1292 split_tree (in, code, conp, litp, negate_p)
1293 tree in;
1294 enum tree_code code;
1295 tree *conp, *litp;
1296 int negate_p;
1297 {
1298 tree var = 0;
1299
1300 *conp = 0;
1301 *litp = 0;
1302
1303 /* Strip any conversions that don't change the machine mode or signedness. */
1304 STRIP_SIGN_NOPS (in);
1305
1306 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1307 *litp = in;
1308 else if (TREE_CONSTANT (in))
1309 *conp = in;
1310
1311 else if (TREE_CODE (in) == code
1312 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1313 /* We can associate addition and subtraction together (even
1314 though the C standard doesn't say so) for integers because
1315 the value is not affected. For reals, the value might be
1316 affected, so we can't. */
1317 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1318 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1319 {
1320 tree op0 = TREE_OPERAND (in, 0);
1321 tree op1 = TREE_OPERAND (in, 1);
1322 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1323 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1324
1325 /* First see if either of the operands is a literal, then a constant. */
1326 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1327 *litp = op0, op0 = 0;
1328 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1329 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1330
1331 if (op0 != 0 && TREE_CONSTANT (op0))
1332 *conp = op0, op0 = 0;
1333 else if (op1 != 0 && TREE_CONSTANT (op1))
1334 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1335
1336 /* If we haven't dealt with either operand, this is not a case we can
1337 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1338 if (op0 != 0 && op1 != 0)
1339 var = in;
1340 else if (op0 != 0)
1341 var = op0;
1342 else
1343 var = op1, neg_var_p = neg1_p;
1344
1345 /* Now do any needed negations. */
1346 if (neg_litp_p) *litp = negate_expr (*litp);
1347 if (neg_conp_p) *conp = negate_expr (*conp);
1348 if (neg_var_p) var = negate_expr (var);
1349 }
1350 else
1351 var = in;
1352
1353 if (negate_p)
1354 {
1355 var = negate_expr (var);
1356 *conp = negate_expr (*conp);
1357 *litp = negate_expr (*litp);
1358 }
1359
1360 return var;
1361 }
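/* Example: splitting IN == (a - 5) with CODE == PLUS_EXPR finds the
   literal 5 on the MINUS side, so *LITP becomes the negated literal -5,
   *CONP stays null, and the variable part a is returned; the caller can
   then rebuild the value as a + -5.  */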
1362
1363 /* Re-associate trees split by the above function. T1 and T2 are either
1364 expressions to associate or null. Return the new expression, if any. If
1365 we build an operation, do it in TYPE and with CODE, except if CODE is a
1366 MINUS_EXPR, in which case we use PLUS_EXPR since split_tree will already
1367 have taken care of the negations. */
1368
1369 static tree
1370 associate_trees (t1, t2, code, type)
1371 tree t1, t2;
1372 enum tree_code code;
1373 tree type;
1374 {
1375 if (t1 == 0)
1376 return t2;
1377 else if (t2 == 0)
1378 return t1;
1379
1380 if (code == MINUS_EXPR)
1381 code = PLUS_EXPR;
1382
1383 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1384 try to fold this since we will have infinite recursion. But do
1385 deal with any NEGATE_EXPRs. */
1386 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1387 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1388 {
1389 if (TREE_CODE (t1) == NEGATE_EXPR)
1390 return build (MINUS_EXPR, type, convert (type, t2),
1391 convert (type, TREE_OPERAND (t1, 0)));
1392 else if (TREE_CODE (t2) == NEGATE_EXPR)
1393 return build (MINUS_EXPR, type, convert (type, t1),
1394 convert (type, TREE_OPERAND (t2, 0)));
1395 else
1396 return build (code, type, convert (type, t1), convert (type, t2));
1397 }
1398
1399 return fold (build (code, type, convert (type, t1), convert (type, t2)));
1400 }
1401 \f
1402 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1403 to produce a new constant.
1404
1405 If NOTRUNC is nonzero, do not truncate the result to fit the data type.
1406 If FORSIZE is nonzero, compute overflow for unsigned types. */
1407
1408 static tree
1409 int_const_binop (code, arg1, arg2, notrunc, forsize)
1410 enum tree_code code;
1411 register tree arg1, arg2;
1412 int notrunc, forsize;
1413 {
1414 HOST_WIDE_INT int1l, int1h, int2l, int2h;
1415 HOST_WIDE_INT low, hi;
1416 HOST_WIDE_INT garbagel, garbageh;
1417 register tree t;
1418 int uns = TREE_UNSIGNED (TREE_TYPE (arg1));
1419 int overflow = 0;
1420 int no_overflow = 0;
1421
1422 int1l = TREE_INT_CST_LOW (arg1);
1423 int1h = TREE_INT_CST_HIGH (arg1);
1424 int2l = TREE_INT_CST_LOW (arg2);
1425 int2h = TREE_INT_CST_HIGH (arg2);
1426
1427 switch (code)
1428 {
1429 case BIT_IOR_EXPR:
1430 low = int1l | int2l, hi = int1h | int2h;
1431 break;
1432
1433 case BIT_XOR_EXPR:
1434 low = int1l ^ int2l, hi = int1h ^ int2h;
1435 break;
1436
1437 case BIT_AND_EXPR:
1438 low = int1l & int2l, hi = int1h & int2h;
1439 break;
1440
1441 case BIT_ANDTC_EXPR:
1442 low = int1l & ~int2l, hi = int1h & ~int2h;
1443 break;
1444
1445 case RSHIFT_EXPR:
1446 int2l = - int2l;
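/* ... fall through ... */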
1447 case LSHIFT_EXPR:
1448 /* It's unclear from the C standard whether shifts can overflow.
1449 The following code ignores overflow; perhaps a C standard
1450 interpretation ruling is needed. */
1451 lshift_double (int1l, int1h, int2l,
1452 TYPE_PRECISION (TREE_TYPE (arg1)),
1453 &low, &hi,
1454 !uns);
1455 no_overflow = 1;
1456 break;
1457
1458 case RROTATE_EXPR:
1459 int2l = - int2l;
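/* ... fall through ... */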
1460 case LROTATE_EXPR:
1461 lrotate_double (int1l, int1h, int2l,
1462 TYPE_PRECISION (TREE_TYPE (arg1)),
1463 &low, &hi);
1464 break;
1465
1466 case PLUS_EXPR:
1467 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1468 break;
1469
1470 case MINUS_EXPR:
1471 neg_double (int2l, int2h, &low, &hi);
1472 add_double (int1l, int1h, low, hi, &low, &hi);
1473 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1474 break;
1475
1476 case MULT_EXPR:
1477 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1478 break;
1479
1480 case TRUNC_DIV_EXPR:
1481 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1482 case EXACT_DIV_EXPR:
1483 /* This is a shortcut for a common special case. */
1484 if (int2h == 0 && int2l > 0
1485 && ! TREE_CONSTANT_OVERFLOW (arg1)
1486 && ! TREE_CONSTANT_OVERFLOW (arg2)
1487 && int1h == 0 && int1l >= 0)
1488 {
1489 if (code == CEIL_DIV_EXPR)
1490 int1l += int2l - 1;
1491 low = int1l / int2l, hi = 0;
1492 break;
1493 }
1494
1495 /* ... fall through ... */
1496
1497 case ROUND_DIV_EXPR:
1498 if (int2h == 0 && int2l == 1)
1499 {
1500 low = int1l, hi = int1h;
1501 break;
1502 }
1503 if (int1l == int2l && int1h == int2h
1504 && ! (int1l == 0 && int1h == 0))
1505 {
1506 low = 1, hi = 0;
1507 break;
1508 }
1509 overflow = div_and_round_double (code, uns,
1510 int1l, int1h, int2l, int2h,
1511 &low, &hi, &garbagel, &garbageh);
1512 break;
1513
1514 case TRUNC_MOD_EXPR:
1515 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1516 /* This is a shortcut for a common special case. */
1517 if (int2h == 0 && int2l > 0
1518 && ! TREE_CONSTANT_OVERFLOW (arg1)
1519 && ! TREE_CONSTANT_OVERFLOW (arg2)
1520 && int1h == 0 && int1l >= 0)
1521 {
1522 if (code == CEIL_MOD_EXPR)
1523 int1l += int2l - 1;
1524 low = int1l % int2l, hi = 0;
1525 break;
1526 }
1527
1528 /* ... fall through ... */
1529
1530 case ROUND_MOD_EXPR:
1531 overflow = div_and_round_double (code, uns,
1532 int1l, int1h, int2l, int2h,
1533 &garbagel, &garbageh, &low, &hi);
1534 break;
1535
1536 case MIN_EXPR:
1537 case MAX_EXPR:
1538 if (uns)
1539 low = (((unsigned HOST_WIDE_INT) int1h
1540 < (unsigned HOST_WIDE_INT) int2h)
1541 || (((unsigned HOST_WIDE_INT) int1h
1542 == (unsigned HOST_WIDE_INT) int2h)
1543 && ((unsigned HOST_WIDE_INT) int1l
1544 < (unsigned HOST_WIDE_INT) int2l)));
1545 else
1546 low = ((int1h < int2h)
1547 || ((int1h == int2h)
1548 && ((unsigned HOST_WIDE_INT) int1l
1549 < (unsigned HOST_WIDE_INT) int2l)));
1550
1551 if (low == (code == MIN_EXPR))
1552 low = int1l, hi = int1h;
1553 else
1554 low = int2l, hi = int2h;
1555 break;
1556
1557 default:
1558 abort ();
1559 }
1560
1561 if (TREE_TYPE (arg1) == sizetype && hi == 0
1562 && low >= 0
1563 && (TYPE_MAX_VALUE (sizetype) == NULL
1564 || low <= TREE_INT_CST_LOW (TYPE_MAX_VALUE (sizetype)))
1565 && ! overflow
1566 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
1567 t = size_int (low);
1568 else
1569 {
1570 t = build_int_2 (low, hi);
1571 TREE_TYPE (t) = TREE_TYPE (arg1);
1572 }
1573
1574 TREE_OVERFLOW (t)
1575 = ((notrunc ? (!uns || forsize) && overflow
1576 : force_fit_type (t, (!uns || forsize) && overflow) && ! no_overflow)
1577 | TREE_OVERFLOW (arg1)
1578 | TREE_OVERFLOW (arg2));
1579
1580 /* If we're doing a size calculation, unsigned arithmetic does overflow.
1581 So check if force_fit_type truncated the value. */
1582 if (forsize
1583 && ! TREE_OVERFLOW (t)
1584 && (TREE_INT_CST_HIGH (t) != hi
1585 || TREE_INT_CST_LOW (t) != low))
1586 TREE_OVERFLOW (t) = 1;
1587
1588 TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
1589 | TREE_CONSTANT_OVERFLOW (arg1)
1590 | TREE_CONSTANT_OVERFLOW (arg2));
1591 return t;
1592 }
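/* Example: combining the INTEGER_CSTs 3 and 4 under PLUS_EXPR goes
   through add_double and yields the constant 7; combining the maximum
   signed value with 1 yields the wrapped constant with TREE_OVERFLOW
   set, so callers can diagnose or discard the fold.  */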
1593
1594 /* Define input and output argument for const_binop_1. */
1595 struct cb_args
1596 {
1597 enum tree_code code; /* Input: tree code for operation. */
1598 tree type; /* Input: tree type for operation. */
1599 REAL_VALUE_TYPE d1, d2; /* Input: floating point operands. */
1600 tree t; /* Output: constant for result. */
1601 };
1602
1603 /* Do the real arithmetic for const_binop while protected by a
1604 float overflow handler. */
1605
1606 static void
1607 const_binop_1 (data)
1608 PTR data;
1609 {
1610 struct cb_args *args = (struct cb_args *) data;
1611 REAL_VALUE_TYPE value;
1612
1613 #ifdef REAL_ARITHMETIC
1614 REAL_ARITHMETIC (value, args->code, args->d1, args->d2);
1615 #else
1616 switch (args->code)
1617 {
1618 case PLUS_EXPR:
1619 value = args->d1 + args->d2;
1620 break;
1621
1622 case MINUS_EXPR:
1623 value = args->d1 - args->d2;
1624 break;
1625
1626 case MULT_EXPR:
1627 value = args->d1 * args->d2;
1628 break;
1629
1630 case RDIV_EXPR:
1631 #ifndef REAL_INFINITY
1632 if (args->d2 == 0)
1633 abort ();
1634 #endif
1635
1636 value = args->d1 / args->d2;
1637 break;
1638
1639 case MIN_EXPR:
1640 value = MIN (args->d1, args->d2);
1641 break;
1642
1643 case MAX_EXPR:
1644 value = MAX (args->d1, args->d2);
1645 break;
1646
1647 default:
1648 abort ();
1649 }
1650 #endif /* no REAL_ARITHMETIC */
1651
1652 args->t
1653 = build_real (args->type,
1654 real_value_truncate (TYPE_MODE (args->type), value));
1655 }
1656
1657 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1658 constant. We assume ARG1 and ARG2 have the same data type, or at least
1659 are the same kind of constant and the same machine mode.
1660
1661 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1662
1663 static tree
1664 const_binop (code, arg1, arg2, notrunc)
1665 enum tree_code code;
1666 register tree arg1, arg2;
1667 int notrunc;
1668 {
1669 STRIP_NOPS (arg1); STRIP_NOPS (arg2);
1670
1671 if (TREE_CODE (arg1) == INTEGER_CST)
1672 return int_const_binop (code, arg1, arg2, notrunc, 0);
1673
1674 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1675 if (TREE_CODE (arg1) == REAL_CST)
1676 {
1677 REAL_VALUE_TYPE d1;
1678 REAL_VALUE_TYPE d2;
1679 int overflow = 0;
1680 tree t;
1681 struct cb_args args;
1682
1683 d1 = TREE_REAL_CST (arg1);
1684 d2 = TREE_REAL_CST (arg2);
1685
1686 /* If either operand is a NaN, just return it. Otherwise, set up
1687 for floating-point trap; we return an overflow. */
1688 if (REAL_VALUE_ISNAN (d1))
1689 return arg1;
1690 else if (REAL_VALUE_ISNAN (d2))
1691 return arg2;
1692
1693 /* Setup input for const_binop_1() */
1694 args.type = TREE_TYPE (arg1);
1695 args.d1 = d1;
1696 args.d2 = d2;
1697 args.code = code;
1698
1699 if (do_float_handler (const_binop_1, (PTR) &args))
1700 /* Receive output from const_binop_1. */
1701 t = args.t;
1702 else
1703 {
1704 /* We got an exception from const_binop_1. */
1705 t = copy_node (arg1);
1706 overflow = 1;
1707 }
1708
1709 TREE_OVERFLOW (t)
1710 = (force_fit_type (t, overflow)
1711 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1712 TREE_CONSTANT_OVERFLOW (t)
1713 = TREE_OVERFLOW (t)
1714 | TREE_CONSTANT_OVERFLOW (arg1)
1715 | TREE_CONSTANT_OVERFLOW (arg2);
1716 return t;
1717 }
1718 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
1719 if (TREE_CODE (arg1) == COMPLEX_CST)
1720 {
1721 register tree type = TREE_TYPE (arg1);
1722 register tree r1 = TREE_REALPART (arg1);
1723 register tree i1 = TREE_IMAGPART (arg1);
1724 register tree r2 = TREE_REALPART (arg2);
1725 register tree i2 = TREE_IMAGPART (arg2);
1726 register tree t;
1727
1728 switch (code)
1729 {
1730 case PLUS_EXPR:
1731 t = build_complex (type,
1732 const_binop (PLUS_EXPR, r1, r2, notrunc),
1733 const_binop (PLUS_EXPR, i1, i2, notrunc));
1734 break;
1735
1736 case MINUS_EXPR:
1737 t = build_complex (type,
1738 const_binop (MINUS_EXPR, r1, r2, notrunc),
1739 const_binop (MINUS_EXPR, i1, i2, notrunc));
1740 break;
1741
1742 case MULT_EXPR:
1743 t = build_complex (type,
1744 const_binop (MINUS_EXPR,
1745 const_binop (MULT_EXPR,
1746 r1, r2, notrunc),
1747 const_binop (MULT_EXPR,
1748 i1, i2, notrunc),
1749 notrunc),
1750 const_binop (PLUS_EXPR,
1751 const_binop (MULT_EXPR,
1752 r1, i2, notrunc),
1753 const_binop (MULT_EXPR,
1754 i1, r2, notrunc),
1755 notrunc));
1756 break;
1757
1758 case RDIV_EXPR:
1759 {
1760 register tree magsquared
1761 = const_binop (PLUS_EXPR,
1762 const_binop (MULT_EXPR, r2, r2, notrunc),
1763 const_binop (MULT_EXPR, i2, i2, notrunc),
1764 notrunc);
1765
1766 t = build_complex (type,
1767 const_binop
1768 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1769 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1770 const_binop (PLUS_EXPR,
1771 const_binop (MULT_EXPR, r1, r2,
1772 notrunc),
1773 const_binop (MULT_EXPR, i1, i2,
1774 notrunc),
1775 notrunc),
1776 magsquared, notrunc),
1777 const_binop
1778 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1779 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1780 const_binop (MINUS_EXPR,
1781 const_binop (MULT_EXPR, i1, r2,
1782 notrunc),
1783 const_binop (MULT_EXPR, r1, i2,
1784 notrunc),
1785 notrunc),
1786 magsquared, notrunc));
1787 }
1788 break;
1789
1790 default:
1791 abort ();
1792 }
1793 return t;
1794 }
1795 return 0;
1796 }
1797 \f
1798 /* Return an INTEGER_CST whose low-order HOST_BITS_PER_WIDE_INT bits are
1799 given by NUMBER and whose high-order HOST_BITS_PER_WIDE_INT bits are
1800 given by HIGH.
1801 If BIT_P is nonzero, this represents a size in bits and the type of the
1802 result will be bitsizetype; otherwise it represents a size in bytes and
1803 the type of the result will be sizetype. */
1804
1805 tree
1806 size_int_wide (number, high, bit_p)
1807 unsigned HOST_WIDE_INT number, high;
1808 int bit_p;
1809 {
1810 /* Type-size nodes already made for small sizes. */
1811 static tree size_table[2 * HOST_BITS_PER_WIDE_INT + 1][2];
1812 static int init_p = 0;
1813 tree t;
1814
1815 if (ggc_p && ! init_p)
1816 {
1817 ggc_add_tree_root ((tree *) size_table,
1818 sizeof size_table / sizeof (tree));
1819 init_p = 1;
1820 }
1821
1822 if (number < 2*HOST_BITS_PER_WIDE_INT + 1 && high == 0
1823 && size_table[number][bit_p] != 0)
1824 return size_table[number][bit_p];
1825
1826 if (number < 2*HOST_BITS_PER_WIDE_INT + 1 && high == 0)
1827 {
1828 if (! ggc_p)
1829 {
1830 /* Make this a permanent node. */
1831 push_obstacks_nochange ();
1832 end_temporary_allocation ();
1833 }
1834
1835 t = build_int_2 (number, 0);
1836 TREE_TYPE (t) = bit_p ? bitsizetype : sizetype;
1837 size_table[number][bit_p] = t;
1838
1839 if (! ggc_p)
1840 pop_obstacks ();
1841
1842 return t;
1843 }
1844
1845 t = build_int_2 (number, high);
1846 TREE_TYPE (t) = bit_p ? bitsizetype : sizetype;
1847 TREE_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (t) = force_fit_type (t, 0);
1848 return t;
1849 }
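/* The table above memoizes the sizes 0 .. 2*HOST_BITS_PER_WIDE_INT,
   which cover the common single-bit and single-byte size computations,
   with one slot each for sizetype and bitsizetype; anything larger is
   built fresh on every call.  */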
1850
1851 /* Combine operands ARG0 and ARG1 with arithmetic operation CODE.
1852 CODE is a tree code. Data type is taken from `sizetype'.
1853 If the operands are constant, so is the result. */
1854
1855 tree
1856 size_binop (code, arg0, arg1)
1857 enum tree_code code;
1858 tree arg0, arg1;
1859 {
1860 /* Handle the special case of two integer constants faster. */
1861 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1862 {
1863 /* And some specific cases even faster than that. */
1864 if (code == PLUS_EXPR && integer_zerop (arg0))
1865 return arg1;
1866 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1867 && integer_zerop (arg1))
1868 return arg0;
1869 else if (code == MULT_EXPR && integer_onep (arg0))
1870 return arg1;
1871
1872 /* Handle general case of two integer constants. */
1873 return int_const_binop (code, arg0, arg1, 0, 1);
1874 }
1875
1876 if (arg0 == error_mark_node || arg1 == error_mark_node)
1877 return error_mark_node;
1878
1879 return fold (build (code, sizetype, arg0, arg1));
1880 }
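/* Typical use: size_binop (PLUS_EXPR, size_int (4), size_int (8)) folds
   immediately to the sizetype constant 12 via int_const_binop, while a
   non-constant operand produces a folded PLUS_EXPR tree in sizetype.  */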
1881
1882 /* Combine operands ARG0 and ARG1 with arithmetic operation CODE.
1883 CODE is a tree code. Data type is taken from `ssizetype'.
1884 If the operands are constant, so is the result. */
1885
1886 tree
1887 ssize_binop (code, arg0, arg1)
1888 enum tree_code code;
1889 tree arg0, arg1;
1890 {
1891 /* Handle the special case of two integer constants faster. */
1892 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1893 {
1894 /* And some specific cases even faster than that. */
1895 if (code == PLUS_EXPR && integer_zerop (arg0))
1896 return arg1;
1897 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1898 && integer_zerop (arg1))
1899 return arg0;
1900 else if (code == MULT_EXPR && integer_onep (arg0))
1901 return arg1;
1902
1903 /* Handle general case of two integer constants. We convert
1904 arg0 to ssizetype because int_const_binop uses its type for the
1905 return value. */
1906 arg0 = convert (ssizetype, arg0);
1907 return int_const_binop (code, arg0, arg1, 0, 0);
1908 }
1909
1910 if (arg0 == error_mark_node || arg1 == error_mark_node)
1911 return error_mark_node;
1912
1913 return fold (build (code, ssizetype, arg0, arg1));
1914 }
1915 \f
1916 /* This structure is used to communicate arguments to fold_convert_1. */
1917 struct fc_args
1918 {
1919 tree arg1; /* Input: value to convert. */
1920 tree type; /* Input: type to convert value to. */
1921 tree t; /* Output: result of conversion. */
1922 };
1923
1924 /* Function to convert floating-point constants, protected by floating
1925 point exception handler. */
1926
1927 static void
1928 fold_convert_1 (data)
1929 PTR data;
1930 {
1931 struct fc_args * args = (struct fc_args *) data;
1932
1933 args->t = build_real (args->type,
1934 real_value_truncate (TYPE_MODE (args->type),
1935 TREE_REAL_CST (args->arg1)));
1936 }
1937
1938 /* Given T, a tree representing type conversion of ARG1, a constant,
1939 return a constant tree representing the result of conversion. */
1940
1941 static tree
1942 fold_convert (t, arg1)
1943 register tree t;
1944 register tree arg1;
1945 {
1946 register tree type = TREE_TYPE (t);
1947 int overflow = 0;
1948
1949 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1950 {
1951 if (TREE_CODE (arg1) == INTEGER_CST)
1952 {
1953 /* If we would build a constant wider than GCC supports,
1954 leave the conversion unfolded. */
1955 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
1956 return t;
1957
1958 /* Given an integer constant, make new constant with new type,
1959 appropriately sign-extended or truncated. */
1960 t = build_int_2 (TREE_INT_CST_LOW (arg1),
1961 TREE_INT_CST_HIGH (arg1));
1962 TREE_TYPE (t) = type;
1963 /* Indicate an overflow if (1) ARG1 already overflowed,
1964 or (2) force_fit_type indicates an overflow.
1965 Tell force_fit_type that an overflow has already occurred
1966 if ARG1 is a too-large unsigned value and T is signed.
1967 But don't indicate an overflow if converting a pointer. */
1968 TREE_OVERFLOW (t)
1969 = ((force_fit_type (t,
1970 (TREE_INT_CST_HIGH (arg1) < 0
1971 && (TREE_UNSIGNED (type)
1972 < TREE_UNSIGNED (TREE_TYPE (arg1)))))
1973 && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
1974 || TREE_OVERFLOW (arg1));
1975 TREE_CONSTANT_OVERFLOW (t)
1976 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1977 }
1978 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1979 else if (TREE_CODE (arg1) == REAL_CST)
1980 {
1981 /* Don't initialize these, use assignments.
1982 Initialized local aggregates don't work on old compilers. */
1983 REAL_VALUE_TYPE x;
1984 REAL_VALUE_TYPE l;
1985 REAL_VALUE_TYPE u;
1986 tree type1 = TREE_TYPE (arg1);
1987 int no_upper_bound;
1988
1989 x = TREE_REAL_CST (arg1);
1990 l = real_value_from_int_cst (type1, TYPE_MIN_VALUE (type));
1991
1992 no_upper_bound = (TYPE_MAX_VALUE (type) == NULL);
1993 if (!no_upper_bound)
1994 u = real_value_from_int_cst (type1, TYPE_MAX_VALUE (type));
1995
1996 /* See if X will be in range after truncation towards 0.
1997 To compensate for truncation, move the bounds away from 0,
1998 but reject if X exactly equals the adjusted bounds. */
1999 #ifdef REAL_ARITHMETIC
2000 REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
2001 if (!no_upper_bound)
2002 REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
2003 #else
2004 l--;
2005 if (!no_upper_bound)
2006 u++;
2007 #endif
2008 /* If X is a NaN, use zero instead and show we have an overflow.
2009 Otherwise, range check. */
2010 if (REAL_VALUE_ISNAN (x))
2011 overflow = 1, x = dconst0;
2012 else if (! (REAL_VALUES_LESS (l, x)
2013 && !no_upper_bound
2014 && REAL_VALUES_LESS (x, u)))
2015 overflow = 1;
2016
2017 #ifndef REAL_ARITHMETIC
2018 {
2019 HOST_WIDE_INT low, high;
2020 HOST_WIDE_INT half_word
2021 = (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2);
2022
2023 if (x < 0)
2024 x = -x;
2025
2026 high = (HOST_WIDE_INT) (x / half_word / half_word);
2027 x -= (REAL_VALUE_TYPE) high * half_word * half_word;
2028 if (x >= (REAL_VALUE_TYPE) half_word * half_word / 2)
2029 {
2030 low = x - (REAL_VALUE_TYPE) half_word * half_word / 2;
2031 low |= (HOST_WIDE_INT) -1 << (HOST_BITS_PER_WIDE_INT - 1);
2032 }
2033 else
2034 low = (HOST_WIDE_INT) x;
2035 if (TREE_REAL_CST (arg1) < 0)
2036 neg_double (low, high, &low, &high);
2037 t = build_int_2 (low, high);
2038 }
2039 #else
2040 {
2041 HOST_WIDE_INT low, high;
2042 REAL_VALUE_TO_INT (&low, &high, x);
2043 t = build_int_2 (low, high);
2044 }
2045 #endif
2046 TREE_TYPE (t) = type;
2047 TREE_OVERFLOW (t)
2048 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
2049 TREE_CONSTANT_OVERFLOW (t)
2050 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2051 }
2052 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
2053 TREE_TYPE (t) = type;
2054 }
2055 else if (TREE_CODE (type) == REAL_TYPE)
2056 {
2057 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
2058 if (TREE_CODE (arg1) == INTEGER_CST)
2059 return build_real_from_int_cst (type, arg1);
2060 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
2061 if (TREE_CODE (arg1) == REAL_CST)
2062 {
2063 struct fc_args args;
2064
2065 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
2066 {
2067 t = arg1;
2068 TREE_TYPE (arg1) = type;
2069 return t;
2070 }
2071
2072 /* Set up input for fold_convert_1. */
2073 args.arg1 = arg1;
2074 args.type = type;
2075
2076 if (do_float_handler (fold_convert_1, (PTR) &args))
2077 {
2078 /* Receive output from fold_convert_1. */
2079 t = args.t;
2080 }
2081 else
2082 {
2083 /* We got an exception from fold_convert_1. */
2084 overflow = 1;
2085 t = copy_node (arg1);
2086 }
2087
2088 TREE_OVERFLOW (t)
2089 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
2090 TREE_CONSTANT_OVERFLOW (t)
2091 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2092 return t;
2093 }
2094 }
2095 TREE_CONSTANT (t) = 1;
2096 return t;
2097 }
2098 \f
2099 /* Return an expr equal to X but certainly not valid as an lvalue. */
2100
2101 tree
2102 non_lvalue (x)
2103 tree x;
2104 {
2105 tree result;
2106
2107 /* These things are certainly not lvalues. */
2108 if (TREE_CODE (x) == NON_LVALUE_EXPR
2109 || TREE_CODE (x) == INTEGER_CST
2110 || TREE_CODE (x) == REAL_CST
2111 || TREE_CODE (x) == STRING_CST
2112 || TREE_CODE (x) == ADDR_EXPR)
2113 return x;
2114
2115 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2116 TREE_CONSTANT (result) = TREE_CONSTANT (x);
2117 return result;
2118 }
2119
2120 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2121 Zero means allow extended lvalues. */
2122
2123 int pedantic_lvalues;
2124
2125 /* When pedantic, return an expr equal to X but certainly not valid as a
2126 pedantic lvalue. Otherwise, return X. */
2127
2128 tree
2129 pedantic_non_lvalue (x)
2130 tree x;
2131 {
2132 if (pedantic_lvalues)
2133 return non_lvalue (x);
2134 else
2135 return x;
2136 }
2137 \f
2138 /* Given a tree comparison code, return the code that is the logical inverse
2139 of the given code. It is not safe to do this for floating-point
2140 comparisons, except for NE_EXPR and EQ_EXPR. */
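/* An illustrative note on why floating point is excluded: under IEEE
   arithmetic, if either operand is a NaN then `X < Y' and `X >= Y' are
   both false, so rewriting one as the inverse of the other would change
   the result. Callers such as invert_truthvalue below therefore guard
   the floating-point case before using this function. */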
2141
2142 static enum tree_code
2143 invert_tree_comparison (code)
2144 enum tree_code code;
2145 {
2146 switch (code)
2147 {
2148 case EQ_EXPR:
2149 return NE_EXPR;
2150 case NE_EXPR:
2151 return EQ_EXPR;
2152 case GT_EXPR:
2153 return LE_EXPR;
2154 case GE_EXPR:
2155 return LT_EXPR;
2156 case LT_EXPR:
2157 return GE_EXPR;
2158 case LE_EXPR:
2159 return GT_EXPR;
2160 default:
2161 abort ();
2162 }
2163 }
2164
2165 /* Similar, but return the comparison that results if the operands are
2166 swapped. This is safe for floating-point. */
2167
2168 static enum tree_code
2169 swap_tree_comparison (code)
2170 enum tree_code code;
2171 {
2172 switch (code)
2173 {
2174 case EQ_EXPR:
2175 case NE_EXPR:
2176 return code;
2177 case GT_EXPR:
2178 return LT_EXPR;
2179 case GE_EXPR:
2180 return LE_EXPR;
2181 case LT_EXPR:
2182 return GT_EXPR;
2183 case LE_EXPR:
2184 return GE_EXPR;
2185 default:
2186 abort ();
2187 }
2188 }
2189
2190 /* Return nonzero if CODE is a tree code that represents a truth value. */
2191
2192 static int
2193 truth_value_p (code)
2194 enum tree_code code;
2195 {
2196 return (TREE_CODE_CLASS (code) == '<'
2197 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2198 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2199 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2200 }
2201 \f
2202 /* Return nonzero if two operands are necessarily equal.
2203 If ONLY_CONST is non-zero, only return non-zero for constants.
2204 This function tests whether the operands are indistinguishable;
2205 it does not test whether they are equal using C's == operation.
2206 The distinction is important for IEEE floating point, because
2207 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2208 (2) two NaNs may be indistinguishable, but NaN!=NaN. */
2209
2210 int
2211 operand_equal_p (arg0, arg1, only_const)
2212 tree arg0, arg1;
2213 int only_const;
2214 {
2215 /* If both types don't have the same signedness, then we can't consider
2216 them equal. We must check this before the STRIP_NOPS calls
2217 because they may change the signedness of the arguments. */
2218 if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
2219 return 0;
2220
2221 STRIP_NOPS (arg0);
2222 STRIP_NOPS (arg1);
2223
2224 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2225 /* This is needed for conversions and for COMPONENT_REF.
2226 Might as well play it safe and always test this. */
2227 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2228 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2229 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2230 return 0;
2231
2232 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2233 We don't care about side effects in that case because the SAVE_EXPR
2234 takes care of that for us. In all other cases, two expressions are
2235 equal if they have no side effects. If we have two identical
2236 expressions with side effects that should be treated the same due
2237 to the only side effects being identical SAVE_EXPR's, that will
2238 be detected in the recursive calls below. */
2239 if (arg0 == arg1 && ! only_const
2240 && (TREE_CODE (arg0) == SAVE_EXPR
2241 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2242 return 1;
2243
2244 /* Next handle constant cases, those for which we can return 1 even
2245 if ONLY_CONST is set. */
2246 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2247 switch (TREE_CODE (arg0))
2248 {
2249 case INTEGER_CST:
2250 return (! TREE_CONSTANT_OVERFLOW (arg0)
2251 && ! TREE_CONSTANT_OVERFLOW (arg1)
2252 && TREE_INT_CST_LOW (arg0) == TREE_INT_CST_LOW (arg1)
2253 && TREE_INT_CST_HIGH (arg0) == TREE_INT_CST_HIGH (arg1));
2254
2255 case REAL_CST:
2256 return (! TREE_CONSTANT_OVERFLOW (arg0)
2257 && ! TREE_CONSTANT_OVERFLOW (arg1)
2258 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2259 TREE_REAL_CST (arg1)));
2260
2261 case COMPLEX_CST:
2262 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2263 only_const)
2264 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2265 only_const));
2266
2267 case STRING_CST:
2268 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2269 && ! memcmp (TREE_STRING_POINTER (arg0),
2270 TREE_STRING_POINTER (arg1),
2271 TREE_STRING_LENGTH (arg0)));
2272
2273 case ADDR_EXPR:
2274 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2275 0);
2276 default:
2277 break;
2278 }
2279
2280 if (only_const)
2281 return 0;
2282
2283 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2284 {
2285 case '1':
2286 /* Two conversions are equal only if signedness and modes match. */
2287 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
2288 && (TREE_UNSIGNED (TREE_TYPE (arg0))
2289 != TREE_UNSIGNED (TREE_TYPE (arg1))))
2290 return 0;
2291
2292 return operand_equal_p (TREE_OPERAND (arg0, 0),
2293 TREE_OPERAND (arg1, 0), 0);
2294
2295 case '<':
2296 case '2':
2297 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
2298 && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
2299 0))
2300 return 1;
2301
2302 /* For commutative ops, allow the other order. */
2303 return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
2304 || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
2305 || TREE_CODE (arg0) == BIT_IOR_EXPR
2306 || TREE_CODE (arg0) == BIT_XOR_EXPR
2307 || TREE_CODE (arg0) == BIT_AND_EXPR
2308 || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
2309 && operand_equal_p (TREE_OPERAND (arg0, 0),
2310 TREE_OPERAND (arg1, 1), 0)
2311 && operand_equal_p (TREE_OPERAND (arg0, 1),
2312 TREE_OPERAND (arg1, 0), 0));
2313
2314 case 'r':
2315 /* If either of the pointer (or reference) expressions we are dereferencing
2316 contains a side effect, these cannot be equal. */
2317 if (TREE_SIDE_EFFECTS (arg0)
2318 || TREE_SIDE_EFFECTS (arg1))
2319 return 0;
2320
2321 switch (TREE_CODE (arg0))
2322 {
2323 case INDIRECT_REF:
2324 return operand_equal_p (TREE_OPERAND (arg0, 0),
2325 TREE_OPERAND (arg1, 0), 0);
2326
2327 case COMPONENT_REF:
2328 case ARRAY_REF:
2329 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2330 TREE_OPERAND (arg1, 0), 0)
2331 && operand_equal_p (TREE_OPERAND (arg0, 1),
2332 TREE_OPERAND (arg1, 1), 0));
2333
2334 case BIT_FIELD_REF:
2335 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2336 TREE_OPERAND (arg1, 0), 0)
2337 && operand_equal_p (TREE_OPERAND (arg0, 1),
2338 TREE_OPERAND (arg1, 1), 0)
2339 && operand_equal_p (TREE_OPERAND (arg0, 2),
2340 TREE_OPERAND (arg1, 2), 0));
2341 default:
2342 return 0;
2343 }
2344
2345 case 'e':
2346 if (TREE_CODE (arg0) == RTL_EXPR)
2347 return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
2348 return 0;
2349
2350 default:
2351 return 0;
2352 }
2353 }
2354 \f
2355 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2356 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2357
2358 When in doubt, return 0. */
2359
2360 static int
2361 operand_equal_for_comparison_p (arg0, arg1, other)
2362 tree arg0, arg1;
2363 tree other;
2364 {
2365 int unsignedp1, unsignedpo;
2366 tree primarg0, primarg1, primother;
2367 unsigned correct_width;
2368
2369 if (operand_equal_p (arg0, arg1, 0))
2370 return 1;
2371
2372 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2373 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2374 return 0;
2375
2376 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2377 and see if the inner values are the same. This removes any
2378 signedness comparison, which doesn't matter here. */
2379 primarg0 = arg0, primarg1 = arg1;
2380 STRIP_NOPS (primarg0); STRIP_NOPS (primarg1);
2381 if (operand_equal_p (primarg0, primarg1, 0))
2382 return 1;
2383
2384 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2385 actual comparison operand, ARG0.
2386
2387 First throw away any conversions to wider types
2388 already present in the operands. */
2389
2390 primarg1 = get_narrower (arg1, &unsignedp1);
2391 primother = get_narrower (other, &unsignedpo);
2392
2393 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2394 if (unsignedp1 == unsignedpo
2395 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2396 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2397 {
2398 tree type = TREE_TYPE (arg0);
2399
2400 /* Make sure shorter operand is extended the right way
2401 to match the longer operand. */
2402 primarg1 = convert (signed_or_unsigned_type (unsignedp1,
2403 TREE_TYPE (primarg1)),
2404 primarg1);
2405
2406 if (operand_equal_p (arg0, convert (type, primarg1), 0))
2407 return 1;
2408 }
2409
2410 return 0;
2411 }
2412 \f
2413 /* See if ARG is an expression that is either a comparison or is performing
2414 arithmetic on comparisons. The comparisons must only be comparing
2415 two different values, which will be stored in *CVAL1 and *CVAL2; if
2416 they are non-zero it means that some operands have already been found.
2417 No variables may be used anywhere else in the expression except in the
2418 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2419 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2420
2421 If this is true, return 1. Otherwise, return zero. */
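/* For example (illustrative only), for ARG = `a < b || a == b' this
   returns 1 with *CVAL1 = `a' and *CVAL2 = `b', while for
   ARG = `a < b || c == d' it returns 0, since more than two distinct
   values appear in the comparisons. */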
2422
2423 static int
2424 twoval_comparison_p (arg, cval1, cval2, save_p)
2425 tree arg;
2426 tree *cval1, *cval2;
2427 int *save_p;
2428 {
2429 enum tree_code code = TREE_CODE (arg);
2430 char class = TREE_CODE_CLASS (code);
2431
2432 /* We can handle some of the 'e' cases here. */
2433 if (class == 'e' && code == TRUTH_NOT_EXPR)
2434 class = '1';
2435 else if (class == 'e'
2436 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2437 || code == COMPOUND_EXPR))
2438 class = '2';
2439
2440 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
2441 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2442 {
2443 /* If we've already found a CVAL1 or CVAL2, this expression is
2444 too complex to handle. */
2445 if (*cval1 || *cval2)
2446 return 0;
2447
2448 class = '1';
2449 *save_p = 1;
2450 }
2451
2452 switch (class)
2453 {
2454 case '1':
2455 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2456
2457 case '2':
2458 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2459 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2460 cval1, cval2, save_p));
2461
2462 case 'c':
2463 return 1;
2464
2465 case 'e':
2466 if (code == COND_EXPR)
2467 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2468 cval1, cval2, save_p)
2469 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2470 cval1, cval2, save_p)
2471 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2472 cval1, cval2, save_p));
2473 return 0;
2474
2475 case '<':
2476 /* First see if we can handle the first operand, then the second. For
2477 the second operand, we know *CVAL1 can't be zero. It must be that
2478 one side of the comparison is each of the values; test for the
2479 case where this isn't true by failing if the two operands
2480 are the same. */
2481
2482 if (operand_equal_p (TREE_OPERAND (arg, 0),
2483 TREE_OPERAND (arg, 1), 0))
2484 return 0;
2485
2486 if (*cval1 == 0)
2487 *cval1 = TREE_OPERAND (arg, 0);
2488 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2489 ;
2490 else if (*cval2 == 0)
2491 *cval2 = TREE_OPERAND (arg, 0);
2492 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2493 ;
2494 else
2495 return 0;
2496
2497 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2498 ;
2499 else if (*cval2 == 0)
2500 *cval2 = TREE_OPERAND (arg, 1);
2501 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2502 ;
2503 else
2504 return 0;
2505
2506 return 1;
2507
2508 default:
2509 return 0;
2510 }
2511 }
2512 \f
2513 /* ARG is a tree that is known to contain just arithmetic operations and
2514 comparisons. Evaluate the operations in the tree substituting NEW0 for
2515 any occurrence of OLD0 as an operand of a comparison and likewise for
2516 NEW1 and OLD1. */
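/* For example (illustrative only), with OLD0 = `x', NEW0 = `3',
   OLD1 = `y' and NEW1 = `4', the tree `x < y && x == y' is rewritten
   as `3 < 4 && 3 == 4', which then folds to constant zero. */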
2517
2518 static tree
2519 eval_subst (arg, old0, new0, old1, new1)
2520 tree arg;
2521 tree old0, new0, old1, new1;
2522 {
2523 tree type = TREE_TYPE (arg);
2524 enum tree_code code = TREE_CODE (arg);
2525 char class = TREE_CODE_CLASS (code);
2526
2527 /* We can handle some of the 'e' cases here. */
2528 if (class == 'e' && code == TRUTH_NOT_EXPR)
2529 class = '1';
2530 else if (class == 'e'
2531 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2532 class = '2';
2533
2534 switch (class)
2535 {
2536 case '1':
2537 return fold (build1 (code, type,
2538 eval_subst (TREE_OPERAND (arg, 0),
2539 old0, new0, old1, new1)));
2540
2541 case '2':
2542 return fold (build (code, type,
2543 eval_subst (TREE_OPERAND (arg, 0),
2544 old0, new0, old1, new1),
2545 eval_subst (TREE_OPERAND (arg, 1),
2546 old0, new0, old1, new1)));
2547
2548 case 'e':
2549 switch (code)
2550 {
2551 case SAVE_EXPR:
2552 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2553
2554 case COMPOUND_EXPR:
2555 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2556
2557 case COND_EXPR:
2558 return fold (build (code, type,
2559 eval_subst (TREE_OPERAND (arg, 0),
2560 old0, new0, old1, new1),
2561 eval_subst (TREE_OPERAND (arg, 1),
2562 old0, new0, old1, new1),
2563 eval_subst (TREE_OPERAND (arg, 2),
2564 old0, new0, old1, new1)));
2565 default:
2566 break;
2567 }
2568 /* fall through - ??? */
2569
2570 case '<':
2571 {
2572 tree arg0 = TREE_OPERAND (arg, 0);
2573 tree arg1 = TREE_OPERAND (arg, 1);
2574
2575 /* We need to check both for exact equality and tree equality. The
2576 former will be true if the operand has a side-effect. In that
2577 case, we know the operand occurred exactly once. */
2578
2579 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2580 arg0 = new0;
2581 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2582 arg0 = new1;
2583
2584 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2585 arg1 = new0;
2586 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2587 arg1 = new1;
2588
2589 return fold (build (code, type, arg0, arg1));
2590 }
2591
2592 default:
2593 return arg;
2594 }
2595 }
2596 \f
2597 /* Return a tree for the case when the result of an expression is RESULT
2598 converted to TYPE and OMITTED was previously an operand of the expression
2599 but is now not needed (e.g., we folded OMITTED * 0).
2600
2601 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2602 the conversion of RESULT to TYPE. */
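/* For instance (illustrative only), when folding `foo () * 0' the call
   cannot simply be dropped; omit_one_operand (type, integer_zero_node,
   <call to foo>) yields the COMPOUND_EXPR `(foo (), 0)', preserving the
   side effect while still producing the constant result. */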
2603
2604 static tree
2605 omit_one_operand (type, result, omitted)
2606 tree type, result, omitted;
2607 {
2608 tree t = convert (type, result);
2609
2610 if (TREE_SIDE_EFFECTS (omitted))
2611 return build (COMPOUND_EXPR, type, omitted, t);
2612
2613 return non_lvalue (t);
2614 }
2615
2616 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2617
2618 static tree
2619 pedantic_omit_one_operand (type, result, omitted)
2620 tree type, result, omitted;
2621 {
2622 tree t = convert (type, result);
2623
2624 if (TREE_SIDE_EFFECTS (omitted))
2625 return build (COMPOUND_EXPR, type, omitted, t);
2626
2627 return pedantic_non_lvalue (t);
2628 }
2629
2630
2631 \f
2632 /* Return a simplified tree node for the truth-negation of ARG. This
2633 never alters ARG itself. We assume that ARG is an operation that
2634 returns a truth value (0 or 1). */
2635
2636 tree
2637 invert_truthvalue (arg)
2638 tree arg;
2639 {
2640 tree type = TREE_TYPE (arg);
2641 enum tree_code code = TREE_CODE (arg);
2642
2643 if (code == ERROR_MARK)
2644 return arg;
2645
2646 /* If this is a comparison, we can simply invert it, except for
2647 floating-point non-equality comparisons, in which case we just
2648 enclose a TRUTH_NOT_EXPR around what we have. */
2649
2650 if (TREE_CODE_CLASS (code) == '<')
2651 {
2652 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2653 && !flag_fast_math && code != NE_EXPR && code != EQ_EXPR)
2654 return build1 (TRUTH_NOT_EXPR, type, arg);
2655 else
2656 return build (invert_tree_comparison (code), type,
2657 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2658 }
2659
2660 switch (code)
2661 {
2662 case INTEGER_CST:
2663 return convert (type, build_int_2 (TREE_INT_CST_LOW (arg) == 0
2664 && TREE_INT_CST_HIGH (arg) == 0, 0));
2665
2666 case TRUTH_AND_EXPR:
2667 return build (TRUTH_OR_EXPR, type,
2668 invert_truthvalue (TREE_OPERAND (arg, 0)),
2669 invert_truthvalue (TREE_OPERAND (arg, 1)));
2670
2671 case TRUTH_OR_EXPR:
2672 return build (TRUTH_AND_EXPR, type,
2673 invert_truthvalue (TREE_OPERAND (arg, 0)),
2674 invert_truthvalue (TREE_OPERAND (arg, 1)));
2675
2676 case TRUTH_XOR_EXPR:
2677 /* Here we can invert either operand. We invert the first operand
2678 unless the second operand is a TRUTH_NOT_EXPR in which case our
2679 result is the XOR of the first operand with the inside of the
2680 negation of the second operand. */
2681
2682 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2683 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2684 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2685 else
2686 return build (TRUTH_XOR_EXPR, type,
2687 invert_truthvalue (TREE_OPERAND (arg, 0)),
2688 TREE_OPERAND (arg, 1));
2689
2690 case TRUTH_ANDIF_EXPR:
2691 return build (TRUTH_ORIF_EXPR, type,
2692 invert_truthvalue (TREE_OPERAND (arg, 0)),
2693 invert_truthvalue (TREE_OPERAND (arg, 1)));
2694
2695 case TRUTH_ORIF_EXPR:
2696 return build (TRUTH_ANDIF_EXPR, type,
2697 invert_truthvalue (TREE_OPERAND (arg, 0)),
2698 invert_truthvalue (TREE_OPERAND (arg, 1)));
2699
2700 case TRUTH_NOT_EXPR:
2701 return TREE_OPERAND (arg, 0);
2702
2703 case COND_EXPR:
2704 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2705 invert_truthvalue (TREE_OPERAND (arg, 1)),
2706 invert_truthvalue (TREE_OPERAND (arg, 2)));
2707
2708 case COMPOUND_EXPR:
2709 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2710 invert_truthvalue (TREE_OPERAND (arg, 1)));
2711
2712 case WITH_RECORD_EXPR:
2713 return build (WITH_RECORD_EXPR, type,
2714 invert_truthvalue (TREE_OPERAND (arg, 0)),
2715 TREE_OPERAND (arg, 1));
2716
2717 case NON_LVALUE_EXPR:
2718 return invert_truthvalue (TREE_OPERAND (arg, 0));
2719
2720 case NOP_EXPR:
2721 case CONVERT_EXPR:
2722 case FLOAT_EXPR:
2723 return build1 (TREE_CODE (arg), type,
2724 invert_truthvalue (TREE_OPERAND (arg, 0)));
2725
2726 case BIT_AND_EXPR:
2727 if (!integer_onep (TREE_OPERAND (arg, 1)))
2728 break;
2729 return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));
2730
2731 case SAVE_EXPR:
2732 return build1 (TRUTH_NOT_EXPR, type, arg);
2733
2734 case CLEANUP_POINT_EXPR:
2735 return build1 (CLEANUP_POINT_EXPR, type,
2736 invert_truthvalue (TREE_OPERAND (arg, 0)));
2737
2738 default:
2739 break;
2740 }
2741 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2742 abort ();
2743 return build1 (TRUTH_NOT_EXPR, type, arg);
2744 }
2745
2746 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2747 operands are another bit-wise operation with a common input. If so,
2748 distribute the bit operations to save an operation and possibly two if
2749 constants are involved. For example, convert
2750 (A | B) & (A | C) into A | (B & C)
2751 Further simplification will occur if B and C are constants.
2752
2753 If this optimization cannot be done, 0 will be returned. */
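/* The dual transformation is performed as well: for example
   (illustrative), (A & B) | (A & C) becomes A & (B | C), again saving
   an operation and allowing further folding when B and C are
   constants. */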
2754
2755 static tree
2756 distribute_bit_expr (code, type, arg0, arg1)
2757 enum tree_code code;
2758 tree type;
2759 tree arg0, arg1;
2760 {
2761 tree common;
2762 tree left, right;
2763
2764 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2765 || TREE_CODE (arg0) == code
2766 || (TREE_CODE (arg0) != BIT_AND_EXPR
2767 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2768 return 0;
2769
2770 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2771 {
2772 common = TREE_OPERAND (arg0, 0);
2773 left = TREE_OPERAND (arg0, 1);
2774 right = TREE_OPERAND (arg1, 1);
2775 }
2776 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2777 {
2778 common = TREE_OPERAND (arg0, 0);
2779 left = TREE_OPERAND (arg0, 1);
2780 right = TREE_OPERAND (arg1, 0);
2781 }
2782 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2783 {
2784 common = TREE_OPERAND (arg0, 1);
2785 left = TREE_OPERAND (arg0, 0);
2786 right = TREE_OPERAND (arg1, 1);
2787 }
2788 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2789 {
2790 common = TREE_OPERAND (arg0, 1);
2791 left = TREE_OPERAND (arg0, 0);
2792 right = TREE_OPERAND (arg1, 0);
2793 }
2794 else
2795 return 0;
2796
2797 return fold (build (TREE_CODE (arg0), type, common,
2798 fold (build (code, type, left, right))));
2799 }
2800 \f
2801 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2802 starting at BITPOS. The field is unsigned if UNSIGNEDP is non-zero. */
2803
2804 static tree
2805 make_bit_field_ref (inner, type, bitsize, bitpos, unsignedp)
2806 tree inner;
2807 tree type;
2808 int bitsize, bitpos;
2809 int unsignedp;
2810 {
2811 tree result = build (BIT_FIELD_REF, type, inner,
2812 size_int (bitsize), bitsize_int (bitpos, 0L));
2813
2814 TREE_UNSIGNED (result) = unsignedp;
2815
2816 return result;
2817 }
2818
2819 /* Optimize a bit-field compare.
2820
2821 There are two cases: First is a compare against a constant and the
2822 second is a comparison of two items where the fields are at the same
2823 bit position relative to the start of a chunk (byte, halfword, word)
2824 large enough to contain it. In these cases we can avoid the shift
2825 implicit in bitfield extractions.
2826
2827 For constants, we emit a compare of the shifted constant with the
2828 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2829 compared. For two fields at the same position, we do the ANDs with the
2830 similar mask and compare the result of the ANDs.
2831
2832 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2833 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2834 are the left and right operands of the comparison, respectively.
2835
2836 If the optimization described above can be done, we return the resulting
2837 tree. Otherwise we return zero. */
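/* As an illustrative sketch of the constant case, a comparison such as
   `s.f == 3', where F is a 3-bit field starting at bit 2 of a word,
   is rewritten roughly as

	(<word containing F> & (7 << 2)) == (3 << 2)

   so that no shift is needed to extract the field. The exact bit
   numbers depend on BYTES_BIG_ENDIAN and on the mode chosen by
   get_best_mode below. */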
2838
2839 static tree
2840 optimize_bit_field_compare (code, compare_type, lhs, rhs)
2841 enum tree_code code;
2842 tree compare_type;
2843 tree lhs, rhs;
2844 {
2845 int lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
2846 tree type = TREE_TYPE (lhs);
2847 tree signed_type, unsigned_type;
2848 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2849 enum machine_mode lmode, rmode, nmode;
2850 int lunsignedp, runsignedp;
2851 int lvolatilep = 0, rvolatilep = 0;
2852 int alignment;
2853 tree linner, rinner = NULL_TREE;
2854 tree mask;
2855 tree offset;
2856
2857 /* Get all the information about the extractions being done. If the bit size
2858 is the same as the size of the underlying object, we aren't doing an
2859 extraction at all and so can do nothing. We also don't want to
2860 do anything if the inner expression is a PLACEHOLDER_EXPR since we
2861 then will no longer be able to replace it. */
2862 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2863 &lunsignedp, &lvolatilep, &alignment);
2864 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2865 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
2866 return 0;
2867
2868 if (!const_p)
2869 {
2870 /* If this is not a constant, we can only do something if bit positions,
2871 sizes, and signedness are the same. */
2872 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
2873 &runsignedp, &rvolatilep, &alignment);
2874
2875 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2876 || lunsignedp != runsignedp || offset != 0
2877 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
2878 return 0;
2879 }
2880
2881 /* See if we can find a mode to refer to this field. We should be able to,
2882 but fail if we can't. */
2883 nmode = get_best_mode (lbitsize, lbitpos,
2884 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
2885 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
2886 TYPE_ALIGN (TREE_TYPE (rinner))),
2887 word_mode, lvolatilep || rvolatilep);
2888 if (nmode == VOIDmode)
2889 return 0;
2890
2891 /* Set signed and unsigned types of the precision of this mode for the
2892 shifts below. */
2893 signed_type = type_for_mode (nmode, 0);
2894 unsigned_type = type_for_mode (nmode, 1);
2895
2896 /* Compute the bit position and size for the new reference and our offset
2897 within it. If the new reference is the same size as the original, we
2898 won't optimize anything, so return zero. */
2899 nbitsize = GET_MODE_BITSIZE (nmode);
2900 nbitpos = lbitpos & ~ (nbitsize - 1);
2901 lbitpos -= nbitpos;
2902 if (nbitsize == lbitsize)
2903 return 0;
2904
2905 if (BYTES_BIG_ENDIAN)
2906 lbitpos = nbitsize - lbitsize - lbitpos;
2907
2908 /* Make the mask to be used against the extracted field. */
2909 mask = build_int_2 (~0, ~0);
2910 TREE_TYPE (mask) = unsigned_type;
2911 force_fit_type (mask, 0);
2912 mask = convert (unsigned_type, mask);
2913 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
2914 mask = const_binop (RSHIFT_EXPR, mask,
2915 size_int (nbitsize - lbitsize - lbitpos), 0);
2916
2917 if (! const_p)
2918 /* If not comparing with constant, just rework the comparison
2919 and return. */
2920 return build (code, compare_type,
2921 build (BIT_AND_EXPR, unsigned_type,
2922 make_bit_field_ref (linner, unsigned_type,
2923 nbitsize, nbitpos, 1),
2924 mask),
2925 build (BIT_AND_EXPR, unsigned_type,
2926 make_bit_field_ref (rinner, unsigned_type,
2927 nbitsize, nbitpos, 1),
2928 mask));
2929
2930 /* Otherwise, we are handling the constant case. See if the constant is too
2931 big for the field. Warn and return a tree for 0 (false) if so. We do
2932 this not only for its own sake, but to avoid having to test for this
2933 error case below. If we didn't, we might generate wrong code.
2934
2935 For unsigned fields, the constant shifted right by the field length should
2936 be all zero. For signed fields, the high-order bits should agree with
2937 the sign bit. */
2938
2939 if (lunsignedp)
2940 {
2941 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2942 convert (unsigned_type, rhs),
2943 size_int (lbitsize), 0)))
2944 {
2945 warning ("comparison is always %d due to width of bitfield",
2946 code == NE_EXPR);
2947 return convert (compare_type,
2948 (code == NE_EXPR
2949 ? integer_one_node : integer_zero_node));
2950 }
2951 }
2952 else
2953 {
2954 tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
2955 size_int (lbitsize - 1), 0);
2956 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2957 {
2958 warning ("comparison is always %d due to width of bitfield",
2959 code == NE_EXPR);
2960 return convert (compare_type,
2961 (code == NE_EXPR
2962 ? integer_one_node : integer_zero_node));
2963 }
2964 }
2965
2966 /* Single-bit compares should always be against zero. */
2967 if (lbitsize == 1 && ! integer_zerop (rhs))
2968 {
2969 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2970 rhs = convert (type, integer_zero_node);
2971 }
2972
2973 /* Make a new bitfield reference, shift the constant over the
2974 appropriate number of bits and mask it with the computed mask
2975 (in case this was a signed field). If we changed it, make a new one. */
2976 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
2977 if (lvolatilep)
2978 {
2979 TREE_SIDE_EFFECTS (lhs) = 1;
2980 TREE_THIS_VOLATILE (lhs) = 1;
2981 }
2982
2983 rhs = fold (const_binop (BIT_AND_EXPR,
2984 const_binop (LSHIFT_EXPR,
2985 convert (unsigned_type, rhs),
2986 size_int (lbitpos), 0),
2987 mask, 0));
2988
2989 return build (code, compare_type,
2990 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
2991 rhs);
2992 }
2993 \f
2994 /* Subroutine for fold_truthop: decode a field reference.
2995
2996 If EXP is a component reference, we return the innermost reference.
2997
2998 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2999 set to the starting bit number.
3000
3001 If the innermost field can be completely contained in a mode-sized
3002 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3003
3004 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3005 otherwise it is not changed.
3006
3007 *PUNSIGNEDP is set to the signedness of the field.
3008
3009 *PMASK is set to the mask used. This is either contained in a
3010 BIT_AND_EXPR or derived from the width of the field.
3011
3012 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3013
3014 Return 0 if this is not a component reference or is one that we can't
3015 do anything with. */
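/* For example (illustrative only), for EXP = `s.f', with F an unsigned
   8-bit field, *PBITSIZE is set to 8 and *PMASK to 0xff; for
   EXP = `s.f & 0x15', *PAND_MASK is set to 0x15 and *PMASK to
   0xff & 0x15, i.e. 0x15. */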
3016
3017 static tree
3018 decode_field_reference (exp, pbitsize, pbitpos, pmode, punsignedp,
3019 pvolatilep, pmask, pand_mask)
3020 tree exp;
3021 int *pbitsize, *pbitpos;
3022 enum machine_mode *pmode;
3023 int *punsignedp, *pvolatilep;
3024 tree *pmask;
3025 tree *pand_mask;
3026 {
3027 tree and_mask = 0;
3028 tree mask, inner, offset;
3029 tree unsigned_type;
3030 int precision;
3031 int alignment;
3032
3033 /* All the optimizations using this function assume integer fields.
3034 There are problems with FP fields since the type_for_size call
3035 below can fail for, e.g., XFmode. */
3036 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3037 return 0;
3038
3039 STRIP_NOPS (exp);
3040
3041 if (TREE_CODE (exp) == BIT_AND_EXPR)
3042 {
3043 and_mask = TREE_OPERAND (exp, 1);
3044 exp = TREE_OPERAND (exp, 0);
3045 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3046 if (TREE_CODE (and_mask) != INTEGER_CST)
3047 return 0;
3048 }
3049
3050
3051 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3052 punsignedp, pvolatilep, &alignment);
3053 if ((inner == exp && and_mask == 0)
3054 || *pbitsize < 0 || offset != 0
3055 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3056 return 0;
3057
3058 /* Compute the mask to access the bitfield. */
3059 unsigned_type = type_for_size (*pbitsize, 1);
3060 precision = TYPE_PRECISION (unsigned_type);
3061
3062 mask = build_int_2 (~0, ~0);
3063 TREE_TYPE (mask) = unsigned_type;
3064 force_fit_type (mask, 0);
3065 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3066 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3067
3068 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3069 if (and_mask != 0)
3070 mask = fold (build (BIT_AND_EXPR, unsigned_type,
3071 convert (unsigned_type, and_mask), mask));
3072
3073 *pmask = mask;
3074 *pand_mask = and_mask;
3075 return inner;
3076 }
3077
3078 /* Return non-zero if MASK represents a mask of SIZE ones in the low-order
3079 bit positions. */
3080
3081 static int
3082 all_ones_mask_p (mask, size)
3083 tree mask;
3084 int size;
3085 {
3086 tree type = TREE_TYPE (mask);
3087 int precision = TYPE_PRECISION (type);
3088 tree tmask;
3089
3090 tmask = build_int_2 (~0, ~0);
3091 TREE_TYPE (tmask) = signed_type (type);
3092 force_fit_type (tmask, 0);
3093 return
3094 tree_int_cst_equal (mask,
3095 const_binop (RSHIFT_EXPR,
3096 const_binop (LSHIFT_EXPR, tmask,
3097 size_int (precision - size),
3098 0),
3099 size_int (precision - size), 0));
3100 }
3101
3102 /* Subroutine for fold_truthop: determine if an operand is simple enough
3103 to be evaluated unconditionally. */
3104
3105 static int
3106 simple_operand_p (exp)
3107 tree exp;
3108 {
3109 /* Strip any conversions that don't change the machine mode. */
3110 while ((TREE_CODE (exp) == NOP_EXPR
3111 || TREE_CODE (exp) == CONVERT_EXPR)
3112 && (TYPE_MODE (TREE_TYPE (exp))
3113 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
3114 exp = TREE_OPERAND (exp, 0);
3115
3116 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
3117 || (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
3118 && ! TREE_ADDRESSABLE (exp)
3119 && ! TREE_THIS_VOLATILE (exp)
3120 && ! DECL_NONLOCAL (exp)
3121 /* Don't regard global variables as simple. They may be
3122 allocated in ways unknown to the compiler (shared memory,
3123 #pragma weak, etc). */
3124 && ! TREE_PUBLIC (exp)
3125 && ! DECL_EXTERNAL (exp)
3126 /* Loading a static variable is unduly expensive, but global
3127 registers aren't expensive. */
3128 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3129 }
3130 \f
3131 /* The following functions are subroutines to fold_range_test and allow it to
3132 try to change a logical combination of comparisons into a range test.
3133
3134 For example, both
3135 X == 2 || X == 3 || X == 4 || X == 5
3136 and
3137 X >= 2 && X <= 5
3138 are converted to
3139 (unsigned) (X - 2) <= 3
3140
3141 We describe each set of comparisons as being either inside or outside
3142 a range, using a variable named like IN_P, and then describe the
3143 range with a lower and upper bound. If one of the bounds is omitted,
3144 it represents either the highest or lowest value of the type.
3145
3146 In the comments below, we represent a range by two numbers in brackets
3147 preceded by a "+" to designate being inside that range, or a "-" to
3148 designate being outside that range, so the condition can be inverted by
3149 flipping the prefix. An omitted bound is represented by a "-". For
3150 example, "- [-, 10]" means being outside the range starting at the lowest
3151 possible value and ending at 10, in other words, being greater than 10.
3152 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3153 always false.
3154
3155 We set up things so that the missing bounds are handled in a consistent
3156 manner so neither a missing bound nor "true" and "false" need to be
3157 handled using a special case. */
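/* In this notation, the example above reads: `X >= 2 && X <= 5' is the
   range "+ [2, 5]", its negation is "- [2, 5]", and the converted test
   `(unsigned) (X - 2) <= 3' checks membership in "+ [2, 5]". */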
3158
3159 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3160 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3161 and UPPER1_P are nonzero if the respective argument is an upper bound
3162 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3163 must be specified for a comparison. ARG1 will be converted to ARG0's
3164 type if both are specified. */
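/* For instance (illustrative only), range_binop (LT_EXPR,
   integer_type_node, <tree for 5>, 0, NULL_TREE, 1) compares 5, taken
   as a lower bound, against an omitted upper bound; the missing bound
   acts as +infinity, so the result is integer_one_node. */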
3165
3166 static tree
3167 range_binop (code, type, arg0, upper0_p, arg1, upper1_p)
3168 enum tree_code code;
3169 tree type;
3170 tree arg0, arg1;
3171 int upper0_p, upper1_p;
3172 {
3173 tree tem;
3174 int result;
3175 int sgn0, sgn1;
3176
3177 /* If neither arg represents infinity, do the normal operation.
3178 Else, if not a comparison, return infinity. Else handle the special
3179 comparison rules. Note that most of the cases below won't occur, but
3180 are handled for consistency. */
3181
3182 if (arg0 != 0 && arg1 != 0)
3183 {
3184 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
3185 arg0, convert (TREE_TYPE (arg0), arg1)));
3186 STRIP_NOPS (tem);
3187 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3188 }
3189
3190 if (TREE_CODE_CLASS (code) != '<')
3191 return 0;
3192
3193 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3194 for neither. In real maths, we cannot assume open-ended ranges are
3195 the same. But this is computer arithmetic, where numbers are finite.
3196 We can therefore treat an omitted upper bound as a value Z greater
3197 than any representable number, and an omitted lower bound as -Z.
3198 This permits us to treat unbounded ranges as equal. */
3199 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3200 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3201 switch (code)
3202 {
3203 case EQ_EXPR:
3204 result = sgn0 == sgn1;
3205 break;
3206 case NE_EXPR:
3207 result = sgn0 != sgn1;
3208 break;
3209 case LT_EXPR:
3210 result = sgn0 < sgn1;
3211 break;
3212 case LE_EXPR:
3213 result = sgn0 <= sgn1;
3214 break;
3215 case GT_EXPR:
3216 result = sgn0 > sgn1;
3217 break;
3218 case GE_EXPR:
3219 result = sgn0 >= sgn1;
3220 break;
3221 default:
3222 abort ();
3223 }
3224
3225 return convert (type, result ? integer_one_node : integer_zero_node);
3226 }
3227 \f
3228 /* Given EXP, a logical expression, set the range it is testing into
3229 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3230 actually being tested. *PLOW and *PHIGH will be made the same type
3231 as the returned expression. If EXP is not a comparison, we will most
3232 likely not be returning a useful value and range. */
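/* For example (illustrative only), for EXP = `(x + 1) > 5' with X a
   signed int, the GT_EXPR case below records "- [-, 5]" for `x + 1',
   and the PLUS_EXPR case then adjusts the bound, so the function
   returns `x' with *PIN_P = 0, *PLOW = 0 (no lower bound) and
   *PHIGH = 4: the test holds exactly when X is outside [-, 4]. */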
3233
3234 static tree
3235 make_range (exp, pin_p, plow, phigh)
3236 tree exp;
3237 int *pin_p;
3238 tree *plow, *phigh;
3239 {
3240 enum tree_code code;
3241 tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
3242 tree orig_type = NULL_TREE;
3243 int in_p, n_in_p;
3244 tree low, high, n_low, n_high;
3245
3246 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3247 and see if we can refine the range. Some of the cases below may not
3248 happen, but it doesn't seem worth worrying about this. We "continue"
3249 the outer loop when we've changed something; otherwise we "break"
3250 the switch, which will "break" the while. */
3251
3252 in_p = 0, low = high = convert (TREE_TYPE (exp), integer_zero_node);
3253
3254 while (1)
3255 {
3256 code = TREE_CODE (exp);
3257
3258 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3259 {
3260 arg0 = TREE_OPERAND (exp, 0);
3261 if (TREE_CODE_CLASS (code) == '<'
3262 || TREE_CODE_CLASS (code) == '1'
3263 || TREE_CODE_CLASS (code) == '2')
3264 type = TREE_TYPE (arg0);
3265 if (TREE_CODE_CLASS (code) == '2'
3266 || TREE_CODE_CLASS (code) == '<'
3267 || (TREE_CODE_CLASS (code) == 'e'
3268 && tree_code_length[(int) code] > 1))
3269 arg1 = TREE_OPERAND (exp, 1);
3270 }
3271
3272 /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
3273 lose a cast by accident. */
3274 if (type != NULL_TREE && orig_type == NULL_TREE)
3275 orig_type = type;
3276
3277 switch (code)
3278 {
3279 case TRUTH_NOT_EXPR:
3280 in_p = ! in_p, exp = arg0;
3281 continue;
3282
3283 case EQ_EXPR: case NE_EXPR:
3284 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3285 /* We can only do something if the range is testing for zero
3286 and if the second operand is an integer constant. Note that
3287 saying something is "in" the range we make is done by
3288 complementing IN_P since it will set in the initial case of
3289 being not equal to zero; "out" is leaving it alone. */
3290 if (low == 0 || high == 0
3291 || ! integer_zerop (low) || ! integer_zerop (high)
3292 || TREE_CODE (arg1) != INTEGER_CST)
3293 break;
3294
3295 switch (code)
3296 {
3297 case NE_EXPR: /* - [c, c] */
3298 low = high = arg1;
3299 break;
3300 case EQ_EXPR: /* + [c, c] */
3301 in_p = ! in_p, low = high = arg1;
3302 break;
3303 case GT_EXPR: /* - [-, c] */
3304 low = 0, high = arg1;
3305 break;
3306 case GE_EXPR: /* + [c, -] */
3307 in_p = ! in_p, low = arg1, high = 0;
3308 break;
3309 case LT_EXPR: /* - [c, -] */
3310 low = arg1, high = 0;
3311 break;
3312 case LE_EXPR: /* + [-, c] */
3313 in_p = ! in_p, low = 0, high = arg1;
3314 break;
3315 default:
3316 abort ();
3317 }
3318
3319 exp = arg0;
3320
3321 /* If this is an unsigned comparison, we also know that EXP is
3322 greater than or equal to zero. We base the range tests we make
3323 on that fact, so we record it here so we can parse existing
3324 range tests. */
3325 if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
3326 {
3327 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
3328 1, convert (type, integer_zero_node),
3329 NULL_TREE))
3330 break;
3331
3332 in_p = n_in_p, low = n_low, high = n_high;
3333
3334 /* If the high bound is missing, but we
3335 have a low bound, reverse the range so
3336 it goes from zero to the low bound minus 1. */
3337 if (high == 0 && low)
3338 {
3339 in_p = ! in_p;
3340 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3341 integer_one_node, 0);
3342 low = convert (type, integer_zero_node);
3343 }
3344 }
3345 continue;
3346
3347 case NEGATE_EXPR:
3348 /* (-x) IN [a,b] -> x in [-b, -a] */
3349 n_low = range_binop (MINUS_EXPR, type,
3350 convert (type, integer_zero_node), 0, high, 1);
3351 n_high = range_binop (MINUS_EXPR, type,
3352 convert (type, integer_zero_node), 0, low, 0);
3353 low = n_low, high = n_high;
3354 exp = arg0;
3355 continue;
3356
3357 case BIT_NOT_EXPR:
3358 /* ~ X -> -X - 1 */
3359 exp = build (MINUS_EXPR, type, negate_expr (arg0),
3360 convert (type, integer_one_node));
3361 continue;
3362
3363 case PLUS_EXPR: case MINUS_EXPR:
3364 if (TREE_CODE (arg1) != INTEGER_CST)
3365 break;
3366
3367 /* If EXP is signed, any overflow in the computation is undefined,
3368 so we don't worry about it so long as our computations on
3369 the bounds don't overflow. For unsigned, overflow is defined
3370 and this is exactly the right thing. */
3371 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3372 type, low, 0, arg1, 0);
3373 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3374 type, high, 1, arg1, 0);
3375 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3376 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3377 break;
3378
3379 /* Check for an unsigned range which has wrapped around the maximum
3380 value thus making n_high < n_low, and normalize it. */
3381 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3382 {
3383 low = range_binop (PLUS_EXPR, type, n_high, 0,
3384 integer_one_node, 0);
3385 high = range_binop (MINUS_EXPR, type, n_low, 0,
3386 integer_one_node, 0);
3387 in_p = ! in_p;
3388 }
3389 else
3390 low = n_low, high = n_high;
3391
3392 exp = arg0;
3393 continue;
3394
3395 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3396 if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
3397 break;
3398
3399 if (! INTEGRAL_TYPE_P (type)
3400 || (low != 0 && ! int_fits_type_p (low, type))
3401 || (high != 0 && ! int_fits_type_p (high, type)))
3402 break;
3403
3404 n_low = low, n_high = high;
3405
3406 if (n_low != 0)
3407 n_low = convert (type, n_low);
3408
3409 if (n_high != 0)
3410 n_high = convert (type, n_high);
3411
3412 /* If we're converting from an unsigned to a signed type,
3413 we will be doing the comparison as unsigned. The tests above
3414 have already verified that LOW and HIGH are both positive.
3415
3416 So we have to make sure that the original unsigned value will
3417 be interpreted as positive. */
3418 if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
3419 {
3420 tree equiv_type = type_for_mode (TYPE_MODE (type), 1);
3421 tree high_positive;
3422
3423 /* A range without an upper bound is, naturally, unbounded.
3424 Since convert would have cropped a very large value, use
3425 the max value for the destination type. */
3426 high_positive
3427 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3428 : TYPE_MAX_VALUE (type);
3429
3430 high_positive = fold (build (RSHIFT_EXPR, type,
3431 convert (type, high_positive),
3432 convert (type, integer_one_node)));
3433
3434 /* If the low bound is specified, "and" the range with the
3435 range for which the original unsigned value will be
3436 positive. */
3437 if (low != 0)
3438 {
3439 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3440 1, n_low, n_high,
3441 1, convert (type, integer_zero_node),
3442 high_positive))
3443 break;
3444
3445 in_p = (n_in_p == in_p);
3446 }
3447 else
3448 {
3449 /* Otherwise, "or" the range with the range of the input
3450 that will be interpreted as negative. */
3451 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3452 0, n_low, n_high,
3453 1, convert (type, integer_zero_node),
3454 high_positive))
3455 break;
3456
3457 in_p = (in_p != n_in_p);
3458 }
3459 }
3460
3461 exp = arg0;
3462 low = n_low, high = n_high;
3463 continue;
3464
3465 default:
3466 break;
3467 }
3468
3469 break;
3470 }
3471
3472 /* If EXP is a constant, we can evaluate whether this is true or false. */
3473 if (TREE_CODE (exp) == INTEGER_CST)
3474 {
3475 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3476 exp, 0, low, 0))
3477 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3478 exp, 1, high, 1)));
3479 low = high = 0;
3480 exp = 0;
3481 }
3482
3483 *pin_p = in_p, *plow = low, *phigh = high;
3484 return exp;
3485 }
3486 \f
3487 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3488 type, TYPE, return an expression to test if EXP is in (or out of, depending
3489 on IN_P) the range. */
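/* For instance (illustrative only), build_range_check (type, x, 1,
   <tree for 2>, <tree for 5>) for a signed integer X reduces, through
   the MINUS_EXPR and unsigned-conversion cases below, to the
   equivalent of `(unsigned) (x - 2) <= 3'. */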
3490
3491 static tree
3492 build_range_check (type, exp, in_p, low, high)
3493 tree type;
3494 tree exp;
3495 int in_p;
3496 tree low, high;
3497 {
3498 tree etype = TREE_TYPE (exp);
3499 tree utype, value;
3500
3501 if (! in_p
3502 && (0 != (value = build_range_check (type, exp, 1, low, high))))
3503 return invert_truthvalue (value);
3504
3505 else if (low == 0 && high == 0)
3506 return convert (type, integer_one_node);
3507
3508 else if (low == 0)
3509 return fold (build (LE_EXPR, type, exp, high));
3510
3511 else if (high == 0)
3512 return fold (build (GE_EXPR, type, exp, low));
3513
3514 else if (operand_equal_p (low, high, 0))
3515 return fold (build (EQ_EXPR, type, exp, low));
3516
3517 else if (TREE_UNSIGNED (etype) && integer_zerop (low))
3518 return build_range_check (type, exp, 1, 0, high);
3519
3520 else if (integer_zerop (low))
3521 {
3522 utype = unsigned_type (etype);
3523 return build_range_check (type, convert (utype, exp), 1, 0,
3524 convert (utype, high));
3525 }
3526
3527 else if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3528 && ! TREE_OVERFLOW (value))
3529 return build_range_check (type,
3530 fold (build (MINUS_EXPR, etype, exp, low)),
3531 1, convert (etype, integer_zero_node), value);
3532 else
3533 return 0;
3534 }
3535 \f
3536 /* Given two ranges, see if we can merge them into one. Return 1 if we
3537 can, 0 if we can't. Set the output range into the specified parameters. */
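/* For example (illustrative only), merging "+ [2, 6]" with "+ [4, 9]"
   yields the overlap "+ [4, 6]", while merging the disjoint ranges
   "+ [2, 3]" and "+ [5, 9]" yields the always-false range "- [-, -]". */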
3538
3539 static int
3540 merge_ranges (pin_p, plow, phigh, in0_p, low0, high0, in1_p, low1, high1)
3541 int *pin_p;
3542 tree *plow, *phigh;
3543 int in0_p, in1_p;
3544 tree low0, high0, low1, high1;
3545 {
3546 int no_overlap;
3547 int subset;
3548 int temp;
3549 tree tem;
3550 int in_p;
3551 tree low, high;
3552 int lowequal = ((low0 == 0 && low1 == 0)
3553 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3554 low0, 0, low1, 0)));
3555 int highequal = ((high0 == 0 && high1 == 0)
3556 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3557 high0, 1, high1, 1)));
3558
3559 /* Make range 0 be the range that starts first, or ends last if they
3560 start at the same value. Swap them if it isn't. */
3561 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3562 low0, 0, low1, 0))
3563 || (lowequal
3564 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3565 high1, 1, high0, 1))))
3566 {
3567 temp = in0_p, in0_p = in1_p, in1_p = temp;
3568 tem = low0, low0 = low1, low1 = tem;
3569 tem = high0, high0 = high1, high1 = tem;
3570 }
3571
3572 /* Now flag two cases, whether the ranges are disjoint or whether the
3573 second range is totally subsumed in the first. Note that the tests
3574 below are simplified by the ones above. */
3575 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3576 high0, 1, low1, 0));
3577 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3578 high1, 1, high0, 1));
3579
3580 /* We now have four cases, depending on whether we are including or
3581 excluding the two ranges. */
3582 if (in0_p && in1_p)
3583 {
3584 /* If they don't overlap, the result is false. If the second range
3585 is a subset it is the result. Otherwise, the range is from the start
3586 of the second to the end of the first. */
3587 if (no_overlap)
3588 in_p = 0, low = high = 0;
3589 else if (subset)
3590 in_p = 1, low = low1, high = high1;
3591 else
3592 in_p = 1, low = low1, high = high0;
3593 }
3594
3595 else if (in0_p && ! in1_p)
3596 {
3597 /* If they don't overlap, the result is the first range. If they are
3598 equal, the result is false. If the second range is a subset of the
3599 first, and the ranges begin at the same place, we go from just after
3600 the end of the first range to the end of the second. If the second
3601 range is not a subset of the first, or if it is a subset and both
3602 ranges end at the same place, the range starts at the start of the
3603 first range and ends just before the second range.
3604 Otherwise, we can't describe this as a single range. */
3605 if (no_overlap)
3606 in_p = 1, low = low0, high = high0;
3607 else if (lowequal && highequal)
3608 in_p = 0, low = high = 0;
3609 else if (subset && lowequal)
3610 {
3611 in_p = 1, high = high0;
3612 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3613 integer_one_node, 0);
3614 }
3615 else if (! subset || highequal)
3616 {
3617 in_p = 1, low = low0;
3618 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3619 integer_one_node, 0);
3620 }
3621 else
3622 return 0;
3623 }
3624
3625 else if (! in0_p && in1_p)
3626 {
3627 /* If they don't overlap, the result is the second range. If the second
3628 is a subset of the first, the result is false. Otherwise,
3629 the range starts just after the first range and ends at the
3630 end of the second. */
3631 if (no_overlap)
3632 in_p = 1, low = low1, high = high1;
3633 else if (subset || highequal)
3634 in_p = 0, low = high = 0;
3635 else
3636 {
3637 in_p = 1, high = high1;
3638 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3639 integer_one_node, 0);
3640 }
3641 }
3642
3643 else
3644 {
3645 /* The case where we are excluding both ranges. Here the complex case
3646 is if they don't overlap. In that case, the only time we have a
3647 range is if they are adjacent. If the second is a subset of the
3648 first, the result is the first. Otherwise, the range to exclude
3649 starts at the beginning of the first range and ends at the end of the
3650 second. */
3651 if (no_overlap)
3652 {
3653 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3654 range_binop (PLUS_EXPR, NULL_TREE,
3655 high0, 1,
3656 integer_one_node, 1),
3657 1, low1, 0)))
3658 in_p = 0, low = low0, high = high1;
3659 else
3660 return 0;
3661 }
3662 else if (subset)
3663 in_p = 0, low = low0, high = high0;
3664 else
3665 in_p = 0, low = low0, high = high1;
3666 }
3667
3668 *pin_p = in_p, *plow = low, *phigh = high;
3669 return 1;
3670 }
3671 \f
3672 /* EXP is some logical combination of boolean tests. See if we can
3673 merge it into some range test. Return the new tree if so. */
3674
3675 static tree
3676 fold_range_test (exp)
3677 tree exp;
3678 {
3679 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3680 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3681 int in0_p, in1_p, in_p;
3682 tree low0, low1, low, high0, high1, high;
3683 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3684 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3685 tree tem;
3686
3687 /* If this is an OR operation, invert both sides; we will invert
3688 again at the end. */
3689 if (or_op)
3690 in0_p = ! in0_p, in1_p = ! in1_p;
3691
3692 /* If both expressions are the same, if we can merge the ranges, and we
3693 can build the range test, return it or it inverted. If one of the
3694 ranges is always true or always false, consider it to be the same
3695 expression as the other. */
3696 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3697 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3698 in1_p, low1, high1)
3699 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3700 lhs != 0 ? lhs
3701 : rhs != 0 ? rhs : integer_zero_node,
3702 in_p, low, high))))
3703 return or_op ? invert_truthvalue (tem) : tem;
3704
3705 /* On machines where the branch cost is expensive, if this is a
3706 short-circuited branch and the underlying object on both sides
3707 is the same, make a non-short-circuit operation. */
3708 else if (BRANCH_COST >= 2
3709 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3710 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3711 && operand_equal_p (lhs, rhs, 0))
3712 {
3713 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3714 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
3715 which cases we can't do this. */
3716 if (simple_operand_p (lhs))
3717 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3718 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3719 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3720 TREE_OPERAND (exp, 1));
3721
3722 else if (global_bindings_p () == 0
3723 && ! contains_placeholder_p (lhs))
3724 {
3725 tree common = save_expr (lhs);
3726
3727 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3728 or_op ? ! in0_p : in0_p,
3729 low0, high0))
3730 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3731 or_op ? ! in1_p : in1_p,
3732 low1, high1))))
3733 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3734 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3735 TREE_TYPE (exp), lhs, rhs);
3736 }
3737 }
3738
3739 return 0;
3740 }
3741 \f
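/* Editorial sketch, not part of the original file: the classic result
   of a successful merge in fold_range_test above, at the source level.
   The test

        ch >= '0' && ch <= '9'

   can be carried out by the single unsigned comparison that
   build_range_check produces in tree form:  */
#if 0
static int
is_digit (int ch)
{
  /* Subtracting the low bound biases the range to start at zero;
     values below '0' wrap around to large unsigned numbers and so
     fail the comparison.  */
  return (unsigned int) (ch - '0') <= 9u;
}
#endif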
3742 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3743 bit value. Arrange things so the extra bits will be set to zero if and
3744 only if C is sign-extended to its full width. If MASK is nonzero,
3745 it is an INTEGER_CST that should be AND'ed with the extra bits. */
3746
3747 static tree
3748 unextend (c, p, unsignedp, mask)
3749 tree c;
3750 int p;
3751 int unsignedp;
3752 tree mask;
3753 {
3754 tree type = TREE_TYPE (c);
3755 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3756 tree temp;
3757
3758 if (p == modesize || unsignedp)
3759 return c;
3760
3761 /* We work by getting just the sign bit into the low-order bit, then
3762 into the high-order bit, then sign-extend. We then XOR that value
3763 with C. */
3764 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3765 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3766
3767 /* We must use a signed type in order to get an arithmetic right shift.
3768 However, we must also avoid introducing accidental overflows, so that
3769 a subsequent call to integer_zerop will work. Hence we must
3770 do the type conversion here. At this point, the constant is either
3771 zero or one, and the conversion to a signed type can never overflow.
3772 We could get an overflow if this conversion is done anywhere else. */
3773 if (TREE_UNSIGNED (type))
3774 temp = convert (signed_type (type), temp);
3775
3776 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3777 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3778 if (mask != 0)
3779 temp = const_binop (BIT_AND_EXPR, temp, convert (TREE_TYPE (c), mask), 0);
3780 /* If necessary, convert the type back to match the type of C. */
3781 if (TREE_UNSIGNED (type))
3782 temp = convert (type, temp);
3783
3784 return convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
3785 }
3786 \f
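/* Editorial worked example, not part of the original file, for the
   function above, assuming MODESIZE == 8 and P == 4 with no MASK:

     c = 0xfa (the 4-bit value -6, sign-extended to 8 bits):
       sign bit  = (c >> 3) & 1 = 1
       temp      = (1 << 7) >> 3, arithmetically = 0xf0
       c ^ temp  = 0x0a    -- extra bits now zero

     c = 0x0a (the same 4-bit pattern, zero-extended):
       c ^ temp  = 0xfa    -- extra bits now set

   So the extra bits end up zero exactly when C arrived sign-extended,
   as the comment above states.  */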
3787 /* Find ways of folding logical expressions of LHS and RHS:
3788 Try to merge two comparisons to the same innermost item.
3789 Look for range tests like "ch >= '0' && ch <= '9'".
3790 Look for combinations of simple terms on machines with expensive branches
3791 and evaluate the RHS unconditionally.
3792
3793 For example, if we have p->a == 2 && p->b == 4 and we can make an
3794 object large enough to span both A and B, we can do this with a comparison
3795 against the object ANDed with a mask.
3796
3797 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3798 operations to do this with one comparison.
3799
3800 We check for both normal comparisons and the BIT_AND_EXPRs made by this
3801 function and the one above.
3802
3803 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3804 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3805
3806 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
3807 two operands.
3808
3809 We return the simplified tree or 0 if no optimization is possible. */
3810
3811 static tree
3812 fold_truthop (code, truth_type, lhs, rhs)
3813 enum tree_code code;
3814 tree truth_type, lhs, rhs;
3815 {
3816 /* If this is the "or" of two comparisons, we can do something if
3817 the comparisons are NE_EXPR. If this is the "and", we can do something
3818 if the comparisons are EQ_EXPR. I.e.,
3819 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3820
3821 WANTED_CODE is this operation code. For single bit fields, we can
3822 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3823 comparison for one-bit fields. */
3824
3825 enum tree_code wanted_code;
3826 enum tree_code lcode, rcode;
3827 tree ll_arg, lr_arg, rl_arg, rr_arg;
3828 tree ll_inner, lr_inner, rl_inner, rr_inner;
3829 int ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3830 int rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3831 int xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3832 int lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3833 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3834 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3835 enum machine_mode lnmode, rnmode;
3836 tree ll_mask, lr_mask, rl_mask, rr_mask;
3837 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3838 tree l_const, r_const;
3839 tree lntype, rntype, result;
3840 int first_bit, end_bit;
3841 int volatilep;
3842
3843 /* Start by getting the comparison codes. Fail if anything is volatile.
3844 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3845 it were surrounded with a NE_EXPR. */
3846
3847 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3848 return 0;
3849
3850 lcode = TREE_CODE (lhs);
3851 rcode = TREE_CODE (rhs);
3852
3853 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3854 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3855
3856 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3857 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3858
3859 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3860 return 0;
3861
3862 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3863 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3864
3865 ll_arg = TREE_OPERAND (lhs, 0);
3866 lr_arg = TREE_OPERAND (lhs, 1);
3867 rl_arg = TREE_OPERAND (rhs, 0);
3868 rr_arg = TREE_OPERAND (rhs, 1);
3869
3870 /* If the RHS can be evaluated unconditionally and its operands are
3871 simple, it wins to evaluate the RHS unconditionally on machines
3872 with expensive branches. In this case, this isn't a comparison
3873 that can be merged. Avoid doing this if the RHS is a floating-point
3874 comparison since those can trap. */
3875
3876 if (BRANCH_COST >= 2
3877 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
3878 && simple_operand_p (rl_arg)
3879 && simple_operand_p (rr_arg))
3880 return build (code, truth_type, lhs, rhs);
3881
3882 /* See if the comparisons can be merged. Then get all the parameters for
3883 each side. */
3884
3885 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
3886 || (rcode != EQ_EXPR && rcode != NE_EXPR))
3887 return 0;
3888
3889 volatilep = 0;
3890 ll_inner = decode_field_reference (ll_arg,
3891 &ll_bitsize, &ll_bitpos, &ll_mode,
3892 &ll_unsignedp, &volatilep, &ll_mask,
3893 &ll_and_mask);
3894 lr_inner = decode_field_reference (lr_arg,
3895 &lr_bitsize, &lr_bitpos, &lr_mode,
3896 &lr_unsignedp, &volatilep, &lr_mask,
3897 &lr_and_mask);
3898 rl_inner = decode_field_reference (rl_arg,
3899 &rl_bitsize, &rl_bitpos, &rl_mode,
3900 &rl_unsignedp, &volatilep, &rl_mask,
3901 &rl_and_mask);
3902 rr_inner = decode_field_reference (rr_arg,
3903 &rr_bitsize, &rr_bitpos, &rr_mode,
3904 &rr_unsignedp, &volatilep, &rr_mask,
3905 &rr_and_mask);
3906
3907 /* The inner operation on the lhs of each comparison must be the
3908 same if we are to be able to do anything.
3909 Then see if we have constants. If not, the same must be true for
3910 the rhs's. */
3911 if (volatilep || ll_inner == 0 || rl_inner == 0
3912 || ! operand_equal_p (ll_inner, rl_inner, 0))
3913 return 0;
3914
3915 if (TREE_CODE (lr_arg) == INTEGER_CST
3916 && TREE_CODE (rr_arg) == INTEGER_CST)
3917 l_const = lr_arg, r_const = rr_arg;
3918 else if (lr_inner == 0 || rr_inner == 0
3919 || ! operand_equal_p (lr_inner, rr_inner, 0))
3920 return 0;
3921 else
3922 l_const = r_const = 0;
3923
3924 /* If either comparison code is not correct for our logical operation,
3925 fail. However, we can convert a one-bit comparison against zero into
3926 the opposite comparison against that bit being set in the field. */
3927
3928 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
3929 if (lcode != wanted_code)
3930 {
3931 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
3932 {
3933 /* Make the left operand unsigned, since we are only interested
3934 in the value of one bit. Otherwise we are doing the wrong
3935 thing below. */
3936 ll_unsignedp = 1;
3937 l_const = ll_mask;
3938 }
3939 else
3940 return 0;
3941 }
3942
3943 /* This is analogous to the code for l_const above. */
3944 if (rcode != wanted_code)
3945 {
3946 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
3947 {
3948 rl_unsignedp = 1;
3949 r_const = rl_mask;
3950 }
3951 else
3952 return 0;
3953 }
3954
3955 /* See if we can find a mode that contains both fields being compared on
3956 the left. If we can't, fail. Otherwise, update all constants and masks
3957 to be relative to a field of that size. */
3958 first_bit = MIN (ll_bitpos, rl_bitpos);
3959 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
3960 lnmode = get_best_mode (end_bit - first_bit, first_bit,
3961 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
3962 volatilep);
3963 if (lnmode == VOIDmode)
3964 return 0;
3965
3966 lnbitsize = GET_MODE_BITSIZE (lnmode);
3967 lnbitpos = first_bit & ~ (lnbitsize - 1);
3968 lntype = type_for_size (lnbitsize, 1);
3969 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
3970
3971 if (BYTES_BIG_ENDIAN)
3972 {
3973 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
3974 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
3975 }
3976
3977 ll_mask = const_binop (LSHIFT_EXPR, convert (lntype, ll_mask),
3978 size_int (xll_bitpos), 0);
3979 rl_mask = const_binop (LSHIFT_EXPR, convert (lntype, rl_mask),
3980 size_int (xrl_bitpos), 0);
3981
3982 if (l_const)
3983 {
3984 l_const = convert (lntype, l_const);
3985 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
3986 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
3987 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
3988 fold (build1 (BIT_NOT_EXPR,
3989 lntype, ll_mask)),
3990 0)))
3991 {
3992 warning ("comparison is always %d", wanted_code == NE_EXPR);
3993
3994 return convert (truth_type,
3995 wanted_code == NE_EXPR
3996 ? integer_one_node : integer_zero_node);
3997 }
3998 }
3999 if (r_const)
4000 {
4001 r_const = convert (lntype, r_const);
4002 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4003 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4004 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4005 fold (build1 (BIT_NOT_EXPR,
4006 lntype, rl_mask)),
4007 0)))
4008 {
4009 warning ("comparison is always %d", wanted_code == NE_EXPR);
4010
4011 return convert (truth_type,
4012 wanted_code == NE_EXPR
4013 ? integer_one_node : integer_zero_node);
4014 }
4015 }
4016
4017 /* If the right sides are not constant, do the same for them. Also,
4018 disallow this optimization if a size or signedness mismatch occurs
4019 between the left and right sides. */
4020 if (l_const == 0)
4021 {
4022 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4023 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4024 /* Make sure the two fields on the right
4025 correspond to the left without being swapped. */
4026 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4027 return 0;
4028
4029 first_bit = MIN (lr_bitpos, rr_bitpos);
4030 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4031 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4032 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4033 volatilep);
4034 if (rnmode == VOIDmode)
4035 return 0;
4036
4037 rnbitsize = GET_MODE_BITSIZE (rnmode);
4038 rnbitpos = first_bit & ~ (rnbitsize - 1);
4039 rntype = type_for_size (rnbitsize, 1);
4040 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4041
4042 if (BYTES_BIG_ENDIAN)
4043 {
4044 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4045 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4046 }
4047
4048 lr_mask = const_binop (LSHIFT_EXPR, convert (rntype, lr_mask),
4049 size_int (xlr_bitpos), 0);
4050 rr_mask = const_binop (LSHIFT_EXPR, convert (rntype, rr_mask),
4051 size_int (xrr_bitpos), 0);
4052
4053 /* Make a mask that corresponds to both fields being compared.
4054 Do this for both items being compared. If the operands are the
4055 same size and the bits being compared are in the same position
4056 then we can do this by masking both and comparing the masked
4057 results. */
4058 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4059 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4060 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4061 {
4062 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4063 ll_unsignedp || rl_unsignedp);
4064 if (! all_ones_mask_p (ll_mask, lnbitsize))
4065 lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
4066
4067 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4068 lr_unsignedp || rr_unsignedp);
4069 if (! all_ones_mask_p (lr_mask, rnbitsize))
4070 rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
4071
4072 return build (wanted_code, truth_type, lhs, rhs);
4073 }
4074
4075 /* There is still another way we can do something: If both pairs of
4076 fields being compared are adjacent, we may be able to make a wider
4077 field containing them both.
4078
4079 Note that we still must mask the lhs/rhs expressions. Furthermore,
4080 the mask must be shifted to account for the shift done by
4081 make_bit_field_ref. */
4082 if ((ll_bitsize + ll_bitpos == rl_bitpos
4083 && lr_bitsize + lr_bitpos == rr_bitpos)
4084 || (ll_bitpos == rl_bitpos + rl_bitsize
4085 && lr_bitpos == rr_bitpos + rr_bitsize))
4086 {
4087 tree type;
4088
4089 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4090 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4091 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4092 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4093
4094 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4095 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4096 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4097 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4098
4099 /* Convert to the smaller type before masking out unwanted bits. */
4100 type = lntype;
4101 if (lntype != rntype)
4102 {
4103 if (lnbitsize > rnbitsize)
4104 {
4105 lhs = convert (rntype, lhs);
4106 ll_mask = convert (rntype, ll_mask);
4107 type = rntype;
4108 }
4109 else if (lnbitsize < rnbitsize)
4110 {
4111 rhs = convert (lntype, rhs);
4112 lr_mask = convert (lntype, lr_mask);
4113 type = lntype;
4114 }
4115 }
4116
4117 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4118 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
4119
4120 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4121 rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
4122
4123 return build (wanted_code, truth_type, lhs, rhs);
4124 }
4125
4126 return 0;
4127 }
4128
4129 /* Handle the case of comparisons with constants. If there is something in
4130 common between the masks, those bits of the constants must be the same.
4131 If not, the condition is always false. Test for this to avoid generating
4132 incorrect code below. */
4133 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4134 if (! integer_zerop (result)
4135 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4136 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4137 {
4138 if (wanted_code == NE_EXPR)
4139 {
4140 warning ("`or' of unmatched not-equal tests is always 1");
4141 return convert (truth_type, integer_one_node);
4142 }
4143 else
4144 {
4145 warning ("`and' of mutually exclusive equal-tests is always 0");
4146 return convert (truth_type, integer_zero_node);
4147 }
4148 }
4149
4150 /* Construct the expression we will return. First get the component
4151 reference we will make. Unless the mask is all ones the width of
4152 that field, perform the mask operation. Then compare with the
4153 merged constant. */
4154 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4155 ll_unsignedp || rl_unsignedp);
4156
4157 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4158 if (! all_ones_mask_p (ll_mask, lnbitsize))
4159 result = build (BIT_AND_EXPR, lntype, result, ll_mask);
4160
4161 return build (wanted_code, truth_type, result,
4162 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4163 }
4164 \f
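/* Editorial sketch, not part of the original file: the effect
   fold_truthop aims for, shown on a hypothetical struct whose two
   bitfields share one byte (little-endian bit layout assumed).  */
#if 0
struct s { unsigned int a : 4; unsigned int b : 4; };

static int
both_match (struct s *p)
{
  /* p->a == 2 && p->b == 4 can be done as one load and one compare
     against the merged constant (2 | (4 << 4)) == 0x42.  */
  return *(unsigned char *) p == 0x42;
}
#endif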
4165 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4166 constant. */
4167
4168 static tree
4169 optimize_minmax_comparison (t)
4170 tree t;
4171 {
4172 tree type = TREE_TYPE (t);
4173 tree arg0 = TREE_OPERAND (t, 0);
4174 enum tree_code op_code;
4175 tree comp_const = TREE_OPERAND (t, 1);
4176 tree minmax_const;
4177 int consts_equal, consts_lt;
4178 tree inner;
4179
4180 STRIP_SIGN_NOPS (arg0);
4181
4182 op_code = TREE_CODE (arg0);
4183 minmax_const = TREE_OPERAND (arg0, 1);
4184 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4185 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4186 inner = TREE_OPERAND (arg0, 0);
4187
4188 /* If something does not permit us to optimize, return the original tree. */
4189 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4190 || TREE_CODE (comp_const) != INTEGER_CST
4191 || TREE_CONSTANT_OVERFLOW (comp_const)
4192 || TREE_CODE (minmax_const) != INTEGER_CST
4193 || TREE_CONSTANT_OVERFLOW (minmax_const))
4194 return t;
4195
4196 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4197 and GT_EXPR, doing the rest with recursive calls using logical
4198 simplifications. */
4199 switch (TREE_CODE (t))
4200 {
4201 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4202 return
4203 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4204
4205 case GE_EXPR:
4206 return
4207 fold (build (TRUTH_ORIF_EXPR, type,
4208 optimize_minmax_comparison
4209 (build (EQ_EXPR, type, arg0, comp_const)),
4210 optimize_minmax_comparison
4211 (build (GT_EXPR, type, arg0, comp_const))));
4212
4213 case EQ_EXPR:
4214 if (op_code == MAX_EXPR && consts_equal)
4215 /* MAX (X, 0) == 0 -> X <= 0 */
4216 return fold (build (LE_EXPR, type, inner, comp_const));
4217
4218 else if (op_code == MAX_EXPR && consts_lt)
4219 /* MAX (X, 0) == 5 -> X == 5 */
4220 return fold (build (EQ_EXPR, type, inner, comp_const));
4221
4222 else if (op_code == MAX_EXPR)
4223 /* MAX (X, 0) == -1 -> false */
4224 return omit_one_operand (type, integer_zero_node, inner);
4225
4226 else if (consts_equal)
4227 /* MIN (X, 0) == 0 -> X >= 0 */
4228 return fold (build (GE_EXPR, type, inner, comp_const));
4229
4230 else if (consts_lt)
4231 /* MIN (X, 0) == 5 -> false */
4232 return omit_one_operand (type, integer_zero_node, inner);
4233
4234 else
4235 /* MIN (X, 0) == -1 -> X == -1 */
4236 return fold (build (EQ_EXPR, type, inner, comp_const));
4237
4238 case GT_EXPR:
4239 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4240 /* MAX (X, 0) > 0 -> X > 0
4241 MAX (X, 0) > 5 -> X > 5 */
4242 return fold (build (GT_EXPR, type, inner, comp_const));
4243
4244 else if (op_code == MAX_EXPR)
4245 /* MAX (X, 0) > -1 -> true */
4246 return omit_one_operand (type, integer_one_node, inner);
4247
4248 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4249 /* MIN (X, 0) > 0 -> false
4250 MIN (X, 0) > 5 -> false */
4251 return omit_one_operand (type, integer_zero_node, inner);
4252
4253 else
4254 /* MIN (X, 0) > -1 -> X > -1 */
4255 return fold (build (GT_EXPR, type, inner, comp_const));
4256
4257 default:
4258 return t;
4259 }
4260 }
4261 \f
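/* Editorial worked example, not part of the original file: for
   t = (MAX (x, 4) > 4), minmax_const == comp_const so consts_equal is
   set, and the GT_EXPR case above returns fold (x > 4).  Indeed, if
   x > 4 the MAX is x; otherwise the MAX is 4, which is not > 4.  */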
4262 /* T is an integer expression that is being multiplied or divided by, or
4263 reduced modulo, a constant C (CODE says which operation and what kind
4264 of divide or modulus). See if we can eliminate that operation by folding it with
4265 other operations already in T. WIDE_TYPE, if non-null, is a type that
4266 should be used for the computation if wider than our type.
4267
4268 For example, if we are dividing (X * 8) + (Y + 16) by 4, we can return
4269 (X * 2) + (Y + 4). We also canonicalize (X + 7) * 4 into X * 4 + 28
4270 in the hope that either the machine has a multiply-accumulate insn
4271 or that this is part of an addressing calculation.
4272
4273 If we return a non-null expression, it is an equivalent form of the
4274 original computation, but need not be in the original type. */
4275
4276 static tree
4277 extract_muldiv (t, c, code, wide_type)
4278 tree t;
4279 tree c;
4280 enum tree_code code;
4281 tree wide_type;
4282 {
4283 tree type = TREE_TYPE (t);
4284 enum tree_code tcode = TREE_CODE (t);
4285 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
4286 > GET_MODE_SIZE (TYPE_MODE (type)))
4287 ? wide_type : type);
4288 tree t1, t2;
4289 int same_p = tcode == code;
4290 tree op0 = NULL_TREE, op1 = NULL_TREE;
4291
4292 /* Don't deal with constants of zero here; they confuse the code below. */
4293 if (integer_zerop (c))
4294 return 0;
4295
4296 if (TREE_CODE_CLASS (tcode) == '1')
4297 op0 = TREE_OPERAND (t, 0);
4298
4299 if (TREE_CODE_CLASS (tcode) == '2')
4300 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
4301
4302 /* Note that we need not handle conditional operations here since fold
4303 already handles those cases. So just do arithmetic here. */
4304 switch (tcode)
4305 {
4306 case INTEGER_CST:
4307 /* For a constant, we can always simplify if we are a multiply
4308 or (for divide and modulus) if it is a multiple of our constant. */
4309 if (code == MULT_EXPR
4310 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
4311 return const_binop (code, convert (ctype, t), convert (ctype, c), 0);
4312 break;
4313
4314 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
4315
4316 /* Pass the constant down and see if we can make a simplification. If
4317 we can, replace this expression with the inner simplification for
4318 possible later conversion to our or some other type. */
4319 if (0 != (t1 = extract_muldiv (op0, convert (TREE_TYPE (op0), c), code,
4320 code == MULT_EXPR ? ctype : NULL_TREE)))
4321 return t1;
4322 break;
4323
4324 case NEGATE_EXPR: case ABS_EXPR:
4325 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4326 return fold (build1 (tcode, ctype, convert (ctype, t1)));
4327 break;
4328
4329 case MIN_EXPR: case MAX_EXPR:
4330 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
4331 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
4332 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
4333 {
4334 if (tree_int_cst_sgn (c) < 0)
4335 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
4336
4337 return fold (build (tcode, ctype, convert (ctype, t1),
4338 convert (ctype, t2)));
4339 }
4340 break;
4341
4342 case WITH_RECORD_EXPR:
4343 if ((t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code, wide_type)) != 0)
4344 return build (WITH_RECORD_EXPR, TREE_TYPE (t1), t1,
4345 TREE_OPERAND (t, 1));
4346 break;
4347
4348 case SAVE_EXPR:
4349 /* If this has not been evaluated and the operand has no side effects,
4350 we can see if we can do something inside it and make a new one.
4351 Note that this test is overly conservative since we can do this
4352 if the only reason it had side effects is that it was another
4353 similar SAVE_EXPR, but that isn't worth bothering with. */
4354 if (SAVE_EXPR_RTL (t) == 0 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0))
4355 && 0 != (t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code,
4356 wide_type)))
4357 return save_expr (t1);
4358 break;
4359
4360 case LSHIFT_EXPR: case RSHIFT_EXPR:
4361 /* If the second operand is constant, this is a multiplication
4362 or floor division by a power of two, so we can treat it that
4363 way unless the multiplier or divisor overflows. */
4364 if (TREE_CODE (op1) == INTEGER_CST
4365 && 0 != (t1 = convert (ctype,
4366 const_binop (LSHIFT_EXPR, size_one_node,
4367 op1, 0)))
4368 && ! TREE_OVERFLOW (t1))
4369 return extract_muldiv (build (tcode == LSHIFT_EXPR
4370 ? MULT_EXPR : FLOOR_DIV_EXPR,
4371 ctype, convert (ctype, op0), t1),
4372 c, code, wide_type);
4373 break;
4374
4375 case PLUS_EXPR: case MINUS_EXPR:
4376 /* See if we can eliminate the operation on both sides. If we can, we
4377 can return a new PLUS or MINUS. If we can't, the only remaining
4378 cases where we can do anything are if the second operand is a
4379 constant. */
4380 t1 = extract_muldiv (op0, c, code, wide_type);
4381 t2 = extract_muldiv (op1, c, code, wide_type);
4382 if (t1 != 0 && t2 != 0)
4383 return fold (build (tcode, ctype, convert (ctype, t1),
4384 convert (ctype, t2)));
4385
4386 /* If this was a subtraction, negate OP1 and set it to be an addition.
4387 This simplifies the logic below. */
4388 if (tcode == MINUS_EXPR)
4389 tcode = PLUS_EXPR, op1 = negate_expr (op1);
4390
4391 if (TREE_CODE (op1) != INTEGER_CST)
4392 break;
4393
4394 /* If either OP1 or C are negative, this optimization is not safe for
4395 some of the division and remainder types while for others we need
4396 to change the code. */
4397 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
4398 {
4399 if (code == CEIL_DIV_EXPR)
4400 code = FLOOR_DIV_EXPR;
4401 else if (code == CEIL_MOD_EXPR)
4402 code = FLOOR_MOD_EXPR;
4403 else if (code == FLOOR_DIV_EXPR)
4404 code = CEIL_DIV_EXPR;
4405 else if (code == FLOOR_MOD_EXPR)
4406 code = CEIL_MOD_EXPR;
4407 else if (code != MULT_EXPR)
4408 break;
4409 }
4410
4411 /* Now do the operation and verify it doesn't overflow. */
4412 op1 = const_binop (code, convert (ctype, op1), convert (ctype, c), 0);
4413 if (op1 == 0 || TREE_OVERFLOW (op1))
4414 break;
4415
4416 /* If we were able to eliminate our operation from the first side,
4417 apply our operation to the second side and reform the PLUS. */
4418 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4419 return fold (build (tcode, ctype, convert (ctype, t1), op1));
4420
4421 /* The last case is if we are a multiply. In that case, we can
4422 apply the distributive law to commute the multiply and addition
4423 if the multiplication of the constants doesn't overflow. */
4424 if (code == MULT_EXPR)
4425 return fold (build (tcode, ctype, fold (build (code, ctype,
4426 convert (ctype, op0),
4427 convert (ctype, c))),
4428 op1));
4429
4430 break;
4431
4432 case MULT_EXPR:
4433 /* We have a special case here if we are doing something like
4434 (C * 8) % 4 since we know that's zero. */
4435 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4436 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4437 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4438 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4439 return omit_one_operand (type, integer_zero_node, op0);
4440
4441 /* ... fall through ... */
4442
4443 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
4444 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
4445 /* If we can extract our operation from the LHS, do so and return a
4446 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
4447 do something only if the second operand is a constant. */
4448 if (same_p
4449 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4450 return fold (build (tcode, ctype, convert (ctype, t1),
4451 convert (ctype, op1)));
4452 else if (tcode == MULT_EXPR && code == MULT_EXPR
4453 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4454 return fold (build (tcode, ctype, convert (ctype, op0),
4455 convert (ctype, t1)));
4456 else if (TREE_CODE (op1) != INTEGER_CST)
4457 return 0;
4458
4459 /* If these are the same operation types, we can associate them
4460 assuming no overflow. */
4461 if (tcode == code
4462 && 0 != (t1 = const_binop (MULT_EXPR, convert (ctype, op1),
4463 convert (ctype, c), 0))
4464 && ! TREE_OVERFLOW (t1))
4465 return fold (build (tcode, ctype, convert (ctype, op0), t1));
4466
4467 /* If these operations "cancel" each other, we have the main
4468 optimizations of this pass, which occur when either constant is a
4469 multiple of the other, in which case we replace this with an
4470 operation of either CODE or TCODE. */
4471 if ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4472 || (tcode == MULT_EXPR
4473 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4474 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR))
4475 {
4476 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4477 return fold (build (tcode, ctype, convert (ctype, op0),
4478 convert (ctype,
4479 const_binop (TRUNC_DIV_EXPR,
4480 op1, c, 0))));
4481 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4482 return fold (build (code, ctype, convert (ctype, op0),
4483 convert (ctype,
4484 const_binop (TRUNC_DIV_EXPR,
4485 c, op1, 0))));
4486 }
4487 break;
4488
4489 default:
4490 break;
4491 }
4492
4493 return 0;
4494 }
4495 \f
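/* Editorial sketch, not part of the original file: the distributive
   identities behind the PLUS_EXPR and division cases above, checked on
   plain ints (valid as long as nothing overflows).  */
#if 0
#include <assert.h>

static void
check_extract_muldiv (int x)
{
  /* (x + 7) * 4 is canonicalized to x * 4 + 28.  */
  assert ((x + 7) * 4 == x * 4 + 28);

  /* (x * 8) / 4 cancels to x * 2, since 8 % 4 == 0; this is the
     MULT_EXPR / division cancellation case.  */
  assert ((x * 8) / 4 == x * 2);
}
#endif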
4496 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4497 S, a SAVE_EXPR, return the expression actually being evaluated. Note
4498 that we may sometimes modify the tree. */
4499
4500 static tree
4501 strip_compound_expr (t, s)
4502 tree t;
4503 tree s;
4504 {
4505 enum tree_code code = TREE_CODE (t);
4506
4507 /* See if this is the COMPOUND_EXPR we want to eliminate. */
4508 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4509 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4510 return TREE_OPERAND (t, 1);
4511
4512 /* See if this is a COND_EXPR or a simple arithmetic operator. We
4513 don't bother handling any other types. */
4514 else if (code == COND_EXPR)
4515 {
4516 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4517 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4518 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
4519 }
4520 else if (TREE_CODE_CLASS (code) == '1')
4521 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4522 else if (TREE_CODE_CLASS (code) == '<'
4523 || TREE_CODE_CLASS (code) == '2')
4524 {
4525 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4526 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4527 }
4528
4529 return t;
4530 }
4531 \f
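/* Editorial note, not part of the original file: in the uses in fold
   below, T has the shape ((void) S, X) -- a COMPOUND_EXPR built only
   to force S to be evaluated -- and strip_compound_expr (T, S) returns
   just X, recursing into conditionals and simple operators to find
   that shape.  */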
4532 /* Return a node which has the indicated constant VALUE (either 0 or
4533 1), and is of the indicated TYPE. */
4534
4535 static tree
4536 constant_boolean_node (value, type)
4537 int value;
4538 tree type;
4539 {
4540 if (type == integer_type_node)
4541 return value ? integer_one_node : integer_zero_node;
4542 else if (TREE_CODE (type) == BOOLEAN_TYPE)
4543 return truthvalue_conversion (value ? integer_one_node :
4544 integer_zero_node);
4545 else
4546 {
4547 tree t = build_int_2 (value, 0);
4548
4549 TREE_TYPE (t) = type;
4550 return t;
4551 }
4552 }
4553
4554 /* Utility function for the following routine, to see how complex a nesting of
4555 COND_EXPRs can be. EXPR is the expression and LIM is a count beyond which
4556 we don't care (to avoid spending too much time on complex expressions). */
4557
4558 static int
4559 count_cond (expr, lim)
4560 tree expr;
4561 int lim;
4562 {
4563 int true, false;
4564
4565 if (TREE_CODE (expr) != COND_EXPR)
4566 return 0;
4567 else if (lim <= 0)
4568 return 0;
4569
4570 true = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4571 false = count_cond (TREE_OPERAND (expr, 2), lim - 1 - true);
4572 return MIN (lim, 1 + true + false);
4573 }
4574 \f
4575 /* Perform constant folding and related simplification of EXPR.
4576 The related simplifications include x*1 => x, x*0 => 0, etc.,
4577 and application of the associative law.
4578 NOP_EXPR conversions may be removed freely (as long as we
4579 are careful not to change the C type of the overall expression).
4580 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
4581 but we can constant-fold them if they have constant operands. */
4582
4583 tree
4584 fold (expr)
4585 tree expr;
4586 {
4587 register tree t = expr;
4588 tree t1 = NULL_TREE;
4589 tree tem;
4590 tree type = TREE_TYPE (expr);
4591 register tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4592 register enum tree_code code = TREE_CODE (t);
4593 register int kind;
4594 int invert;
4595 /* WINS will be nonzero when the switch is done
4596 if all operands are constant. */
4597 int wins = 1;
4598
4599 /* Don't try to process an RTL_EXPR since its operands aren't trees.
4600 Likewise for a SAVE_EXPR that's already been evaluated. */
4601 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t)) != 0)
4602 return t;
4603
4604 /* Return right away if already constant. */
4605 if (TREE_CONSTANT (t))
4606 {
4607 if (code == CONST_DECL)
4608 return DECL_INITIAL (t);
4609 return t;
4610 }
4611
4612 #ifdef MAX_INTEGER_COMPUTATION_MODE
4613 check_max_integer_computation_mode (expr);
4614 #endif
4615
4616 kind = TREE_CODE_CLASS (code);
4617 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
4618 {
4619 tree subop;
4620
4621 /* Special case for conversion ops that can have fixed point args. */
4622 arg0 = TREE_OPERAND (t, 0);
4623
4624 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
4625 if (arg0 != 0)
4626 STRIP_SIGN_NOPS (arg0);
4627
4628 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
4629 subop = TREE_REALPART (arg0);
4630 else
4631 subop = arg0;
4632
4633 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
4634 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4635 && TREE_CODE (subop) != REAL_CST
4636 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4637 )
4638 /* Note that TREE_CONSTANT isn't enough:
4639 static var addresses are constant but we can't
4640 do arithmetic on them. */
4641 wins = 0;
4642 }
4643 else if (kind == 'e' || kind == '<'
4644 || kind == '1' || kind == '2' || kind == 'r')
4645 {
4646 register int len = tree_code_length[(int) code];
4647 register int i;
4648 for (i = 0; i < len; i++)
4649 {
4650 tree op = TREE_OPERAND (t, i);
4651 tree subop;
4652
4653 if (op == 0)
4654 continue; /* Valid for CALL_EXPR, at least. */
4655
4656 if (kind == '<' || code == RSHIFT_EXPR)
4657 {
4658 /* Signedness matters here. Perhaps we can refine this
4659 later. */
4660 STRIP_SIGN_NOPS (op);
4661 }
4662 else
4663 {
4664 /* Strip any conversions that don't change the mode. */
4665 STRIP_NOPS (op);
4666 }
4667
4668 if (TREE_CODE (op) == COMPLEX_CST)
4669 subop = TREE_REALPART (op);
4670 else
4671 subop = op;
4672
4673 if (TREE_CODE (subop) != INTEGER_CST
4674 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4675 && TREE_CODE (subop) != REAL_CST
4676 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4677 )
4678 /* Note that TREE_CONSTANT isn't enough:
4679 static var addresses are constant but we can't
4680 do arithmetic on them. */
4681 wins = 0;
4682
4683 if (i == 0)
4684 arg0 = op;
4685 else if (i == 1)
4686 arg1 = op;
4687 }
4688 }
4689
4690 /* If this is a commutative operation, and ARG0 is a constant, move it
4691 to ARG1 to reduce the number of tests below. */
4692 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
4693 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
4694 || code == BIT_AND_EXPR)
4695 && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
4696 {
4697 tem = arg0; arg0 = arg1; arg1 = tem;
4698
4699 tem = TREE_OPERAND (t, 0); TREE_OPERAND (t, 0) = TREE_OPERAND (t, 1);
4700 TREE_OPERAND (t, 1) = tem;
4701 }
4702
4703 /* Now WINS is set as described above,
4704 ARG0 is the first operand of EXPR,
4705 and ARG1 is the second operand (if it has more than one operand).
4706
4707 First check for cases where an arithmetic operation is applied to a
4708 compound, conditional, or comparison operation. Push the arithmetic
4709 operation inside the compound or conditional to see if any folding
4710 can then be done. Convert comparison to conditional for this purpose.
4711 This also optimizes non-constant cases that used to be done in
4712 expand_expr.
4713
4714 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
4715 one of the operands is a truth value and the other is a truth value or a
4716 BIT_AND_EXPR with the constant 1. In that case, the
4717 code below would make the expression more complex. Change it to a
4718 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
4719 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
4720
4721 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
4722 || code == EQ_EXPR || code == NE_EXPR)
4723 && ((truth_value_p (TREE_CODE (arg0))
4724 && (truth_value_p (TREE_CODE (arg1))
4725 || (TREE_CODE (arg1) == BIT_AND_EXPR
4726 && integer_onep (TREE_OPERAND (arg1, 1)))))
4727 || (truth_value_p (TREE_CODE (arg1))
4728 && (truth_value_p (TREE_CODE (arg0))
4729 || (TREE_CODE (arg0) == BIT_AND_EXPR
4730 && integer_onep (TREE_OPERAND (arg0, 1)))))))
4731 {
4732 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
4733 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
4734 : TRUTH_XOR_EXPR,
4735 type, arg0, arg1));
4736
4737 if (code == EQ_EXPR)
4738 t = invert_truthvalue (t);
4739
4740 return t;
4741 }
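/* For example (editorial note): with the conversion above,
   (a < b) & (c < d) becomes TRUTH_AND_EXPR (a < b, c < d), and
   (a < b) == (c < d) becomes the inversion of a TRUTH_XOR_EXPR.  */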
4742
4743 if (TREE_CODE_CLASS (code) == '1')
4744 {
4745 if (TREE_CODE (arg0) == COMPOUND_EXPR)
4746 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4747 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
4748 else if (TREE_CODE (arg0) == COND_EXPR)
4749 {
4750 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
4751 fold (build1 (code, type, TREE_OPERAND (arg0, 1))),
4752 fold (build1 (code, type, TREE_OPERAND (arg0, 2)))));
4753
4754 /* If this was a conversion, and all we did was to move into
4755 inside the COND_EXPR, bring it back out. But leave it if
4756 it is a conversion from integer to integer and the
4757 result precision is no wider than a word since such a
4758 conversion is cheap and may be optimized away by combine,
4759 while it couldn't if it were outside the COND_EXPR. Then return
4760 so we don't get into an infinite recursion loop taking the
4761 conversion out and then back in. */
4762
4763 if ((code == NOP_EXPR || code == CONVERT_EXPR
4764 || code == NON_LVALUE_EXPR)
4765 && TREE_CODE (t) == COND_EXPR
4766 && TREE_CODE (TREE_OPERAND (t, 1)) == code
4767 && TREE_CODE (TREE_OPERAND (t, 2)) == code
4768 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
4769 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
4770 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
4771 && (INTEGRAL_TYPE_P
4772 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))))
4773 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
4774 t = build1 (code, type,
4775 build (COND_EXPR,
4776 TREE_TYPE (TREE_OPERAND
4777 (TREE_OPERAND (t, 1), 0)),
4778 TREE_OPERAND (t, 0),
4779 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
4780 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
4781 return t;
4782 }
4783 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
4784 return fold (build (COND_EXPR, type, arg0,
4785 fold (build1 (code, type, integer_one_node)),
4786 fold (build1 (code, type, integer_zero_node))));
4787 }
4788 else if (TREE_CODE_CLASS (code) == '2'
4789 || TREE_CODE_CLASS (code) == '<')
4790 {
4791 if (TREE_CODE (arg1) == COMPOUND_EXPR)
4792 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
4793 fold (build (code, type,
4794 arg0, TREE_OPERAND (arg1, 1))));
4795 else if ((TREE_CODE (arg1) == COND_EXPR
4796 || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
4797 && TREE_CODE_CLASS (code) != '<'))
4798 && (TREE_CODE (arg0) != COND_EXPR
4799 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
4800 && (! TREE_SIDE_EFFECTS (arg0)
4801 || (global_bindings_p () == 0
4802 && ! contains_placeholder_p (arg0))))
4803 {
4804 tree test, true_value, false_value;
4805 tree lhs = 0, rhs = 0;
4806
4807 if (TREE_CODE (arg1) == COND_EXPR)
4808 {
4809 test = TREE_OPERAND (arg1, 0);
4810 true_value = TREE_OPERAND (arg1, 1);
4811 false_value = TREE_OPERAND (arg1, 2);
4812 }
4813 else
4814 {
4815 tree testtype = TREE_TYPE (arg1);
4816 test = arg1;
4817 true_value = convert (testtype, integer_one_node);
4818 false_value = convert (testtype, integer_zero_node);
4819 }
4820
4821 /* If ARG0 is complex we want to make sure we only evaluate
4822 it once. Though this is only required if it is volatile, it
4823 might be more efficient even if it is not. However, if we
4824 succeed in folding one part to a constant, we do not need
4825 to make this SAVE_EXPR. Since we do this optimization
4826 primarily to see if we do end up with a constant and this
4827 SAVE_EXPR interferes with later optimizations, suppressing
4828 it when we can is important.
4829
4830 If we are not in a function, we can't make a SAVE_EXPR, so don't
4831 try to do so. Don't try to see if the result is a constant
4832 if an arm is a COND_EXPR since we get exponential behavior
4833 in that case. */
4834
4835 if (TREE_CODE (arg0) != SAVE_EXPR && ! TREE_CONSTANT (arg0)
4836 && global_bindings_p () == 0
4837 && ((TREE_CODE (arg0) != VAR_DECL
4838 && TREE_CODE (arg0) != PARM_DECL)
4839 || TREE_SIDE_EFFECTS (arg0)))
4840 {
4841 if (TREE_CODE (true_value) != COND_EXPR)
4842 lhs = fold (build (code, type, arg0, true_value));
4843
4844 if (TREE_CODE (false_value) != COND_EXPR)
4845 rhs = fold (build (code, type, arg0, false_value));
4846
4847 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4848 && (rhs == 0 || !TREE_CONSTANT (rhs)))
4849 arg0 = save_expr (arg0), lhs = rhs = 0;
4850 }
4851
4852 if (lhs == 0)
4853 lhs = fold (build (code, type, arg0, true_value));
4854 if (rhs == 0)
4855 rhs = fold (build (code, type, arg0, false_value));
4856
4857 test = fold (build (COND_EXPR, type, test, lhs, rhs));
4858
4859 if (TREE_CODE (arg0) == SAVE_EXPR)
4860 return build (COMPOUND_EXPR, type,
4861 convert (void_type_node, arg0),
4862 strip_compound_expr (test, arg0));
4863 else
4864 return convert (type, test);
4865 }
4866
4867 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
4868 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4869 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
4870 else if ((TREE_CODE (arg0) == COND_EXPR
4871 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
4872 && TREE_CODE_CLASS (code) != '<'))
4873 && (TREE_CODE (arg1) != COND_EXPR
4874 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
4875 && (! TREE_SIDE_EFFECTS (arg1)
4876 || (global_bindings_p () == 0
4877 && ! contains_placeholder_p (arg1))))
4878 {
4879 tree test, true_value, false_value;
4880 tree lhs = 0, rhs = 0;
4881
4882 if (TREE_CODE (arg0) == COND_EXPR)
4883 {
4884 test = TREE_OPERAND (arg0, 0);
4885 true_value = TREE_OPERAND (arg0, 1);
4886 false_value = TREE_OPERAND (arg0, 2);
4887 }
4888 else
4889 {
4890 tree testtype = TREE_TYPE (arg0);
4891 test = arg0;
4892 true_value = convert (testtype, integer_one_node);
4893 false_value = convert (testtype, integer_zero_node);
4894 }
4895
4896 if (TREE_CODE (arg1) != SAVE_EXPR && ! TREE_CONSTANT (arg0)
4897 && global_bindings_p () == 0
4898 && ((TREE_CODE (arg1) != VAR_DECL
4899 && TREE_CODE (arg1) != PARM_DECL)
4900 || TREE_SIDE_EFFECTS (arg1)))
4901 {
4902 if (TREE_CODE (true_value) != COND_EXPR)
4903 lhs = fold (build (code, type, true_value, arg1));
4904
4905 if (TREE_CODE (false_value) != COND_EXPR)
4906 rhs = fold (build (code, type, false_value, arg1));
4907
4908 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4909 && (rhs == 0 || !TREE_CONSTANT (rhs)))
4910 arg1 = save_expr (arg1), lhs = rhs = 0;
4911 }
4912
4913 if (lhs == 0)
4914 lhs = fold (build (code, type, true_value, arg1));
4915
4916 if (rhs == 0)
4917 rhs = fold (build (code, type, false_value, arg1));
4918
4919 test = fold (build (COND_EXPR, type, test, lhs, rhs));
4920 if (TREE_CODE (arg1) == SAVE_EXPR)
4921 return build (COMPOUND_EXPR, type,
4922 convert (void_type_node, arg1),
4923 strip_compound_expr (test, arg1));
4924 else
4925 return convert (type, test);
4926 }
4927 }
4928 else if (TREE_CODE_CLASS (code) == '<'
4929 && TREE_CODE (arg0) == COMPOUND_EXPR)
4930 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4931 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
4932 else if (TREE_CODE_CLASS (code) == '<'
4933 && TREE_CODE (arg1) == COMPOUND_EXPR)
4934 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
4935 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
4936
4937 switch (code)
4938 {
4939 case INTEGER_CST:
4940 case REAL_CST:
4941 case STRING_CST:
4942 case COMPLEX_CST:
4943 case CONSTRUCTOR:
4944 return t;
4945
4946 case CONST_DECL:
4947 return fold (DECL_INITIAL (t));
4948
4949 case NOP_EXPR:
4950 case FLOAT_EXPR:
4951 case CONVERT_EXPR:
4952 case FIX_TRUNC_EXPR:
4953 /* Other kinds of FIX are not handled properly by fold_convert. */
4954
4955 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
4956 return TREE_OPERAND (t, 0);
4957
4958 /* Handle cases of two conversions in a row. */
4959 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
4960 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
4961 {
4962 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4963 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
4964 tree final_type = TREE_TYPE (t);
4965 int inside_int = INTEGRAL_TYPE_P (inside_type);
4966 int inside_ptr = POINTER_TYPE_P (inside_type);
4967 int inside_float = FLOAT_TYPE_P (inside_type);
4968 int inside_prec = TYPE_PRECISION (inside_type);
4969 int inside_unsignedp = TREE_UNSIGNED (inside_type);
4970 int inter_int = INTEGRAL_TYPE_P (inter_type);
4971 int inter_ptr = POINTER_TYPE_P (inter_type);
4972 int inter_float = FLOAT_TYPE_P (inter_type);
4973 int inter_prec = TYPE_PRECISION (inter_type);
4974 int inter_unsignedp = TREE_UNSIGNED (inter_type);
4975 int final_int = INTEGRAL_TYPE_P (final_type);
4976 int final_ptr = POINTER_TYPE_P (final_type);
4977 int final_float = FLOAT_TYPE_P (final_type);
4978 int final_prec = TYPE_PRECISION (final_type);
4979 int final_unsignedp = TREE_UNSIGNED (final_type);
4980
4981 /* In addition to the cases of two conversions in a row
4982 handled below, if we are converting something to its own
4983 type via an object of identical or wider precision, neither
4984 conversion is needed. */
4985 if (inside_type == final_type
4986 && ((inter_int && final_int) || (inter_float && final_float))
4987 && inter_prec >= final_prec)
4988 return TREE_OPERAND (TREE_OPERAND (t, 0), 0);
4989
4990 /* Likewise, if the intermediate and final types are either both
4991 float or both integer, we don't need the middle conversion if
4992 it is wider than the final type and doesn't change the signedness
4993 (for integers). Avoid this if the final type is a pointer
4994 since then we sometimes need the inner conversion. Likewise if
4995 the outer has a precision not equal to the size of its mode. */
4996 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
4997 || (inter_float && inside_float))
4998 && inter_prec >= inside_prec
4999 && (inter_float || inter_unsignedp == inside_unsignedp)
5000 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5001 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5002 && ! final_ptr)
5003 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5004
5005 /* If we have a sign-extension of a zero-extended value, we can
5006 replace that by a single zero-extension. */
5007 if (inside_int && inter_int && final_int
5008 && inside_prec < inter_prec && inter_prec < final_prec
5009 && inside_unsignedp && !inter_unsignedp)
5010 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5011
5012 /* Two conversions in a row are not needed unless:
5013 - some conversion is floating-point (overstrict for now), or
5014 - the intermediate type is narrower than both initial and
5015 final, or
5016 - the intermediate type and innermost type differ in signedness,
5017 and the outermost type is wider than the intermediate, or
5018 - the initial type is a pointer type and the precisions of the
5019 intermediate and final types differ, or
5020 - the final type is a pointer type and the precisions of the
5021 initial and intermediate types differ. */
5022 if (! inside_float && ! inter_float && ! final_float
5023 && (inter_prec > inside_prec || inter_prec > final_prec)
5024 && ! (inside_int && inter_int
5025 && inter_unsignedp != inside_unsignedp
5026 && inter_prec < final_prec)
5027 && ((inter_unsignedp && inter_prec > inside_prec)
5028 == (final_unsignedp && final_prec > inter_prec))
5029 && ! (inside_ptr && inter_prec != final_prec)
5030 && ! (final_ptr && inside_prec != inter_prec)
5031 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5032 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5033 && ! final_ptr)
5034 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5035 }
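/* Editorial worked example, not part of the original file, for the
   sign-extension-of-zero-extension rule above, assuming 8-bit chars,
   16-bit shorts and 32-bit ints: in (int) (short) (unsigned char) c
   the value is 0..255 after the inner conversion, so the sign
   extension from short can never set any high bits and the whole
   chain collapses to the single zero extension (int) (unsigned char) c.  */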
5036
5037 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
5038 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
5039 /* Detect assigning a bitfield. */
5040 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
5041 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
5042 {
5043 /* Don't leave an assignment inside a conversion
5044 unless assigning a bitfield. */
5045 tree prev = TREE_OPERAND (t, 0);
5046 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
5047 /* First do the assignment, then return converted constant. */
5048 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
5049 TREE_USED (t) = 1;
5050 return t;
5051 }
5052 if (!wins)
5053 {
5054 TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
5055 return t;
5056 }
5057 return fold_convert (t, arg0);
5058
5059 #if 0 /* This loses on &"foo"[0]. */
5060 case ARRAY_REF:
5061 {
5062 int i;
5063
5064 /* Fold an expression like: "foo"[2] */
5065 if (TREE_CODE (arg0) == STRING_CST
5066 && TREE_CODE (arg1) == INTEGER_CST
5067 && !TREE_INT_CST_HIGH (arg1)
5068 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
5069 {
5070 t = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
5071 TREE_TYPE (t) = TREE_TYPE (TREE_TYPE (arg0));
5072 force_fit_type (t, 0);
5073 }
5074 }
5075 return t;
5076 #endif /* 0 */
5077
5078 case COMPONENT_REF:
5079 if (TREE_CODE (arg0) == CONSTRUCTOR)
5080 {
5081 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
5082 if (m)
5083 t = TREE_VALUE (m);
5084 }
5085 return t;
5086
5087 case RANGE_EXPR:
5088 TREE_CONSTANT (t) = wins;
5089 return t;
5090
5091 case NEGATE_EXPR:
5092 if (wins)
5093 {
5094 if (TREE_CODE (arg0) == INTEGER_CST)
5095 {
5096 HOST_WIDE_INT low, high;
5097 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5098 TREE_INT_CST_HIGH (arg0),
5099 &low, &high);
5100 t = build_int_2 (low, high);
5101 TREE_TYPE (t) = type;
5102 TREE_OVERFLOW (t)
5103 = (TREE_OVERFLOW (arg0)
5104 | force_fit_type (t, overflow && !TREE_UNSIGNED (type)));
5105 TREE_CONSTANT_OVERFLOW (t)
5106 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5107 }
5108 else if (TREE_CODE (arg0) == REAL_CST)
5109 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5110 }
5111 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5112 return TREE_OPERAND (arg0, 0);
5113
5114 /* Convert - (a - b) to (b - a) for non-floating-point. */
5115 else if (TREE_CODE (arg0) == MINUS_EXPR
5116 && (! FLOAT_TYPE_P (type) || flag_fast_math))
5117 return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
5118 TREE_OPERAND (arg0, 0));
5119
5120 return t;
5121
5122 case ABS_EXPR:
5123 if (wins)
5124 {
5125 if (TREE_CODE (arg0) == INTEGER_CST)
5126 {
5127 if (! TREE_UNSIGNED (type)
5128 && TREE_INT_CST_HIGH (arg0) < 0)
5129 {
5130 HOST_WIDE_INT low, high;
5131 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5132 TREE_INT_CST_HIGH (arg0),
5133 &low, &high);
5134 t = build_int_2 (low, high);
5135 TREE_TYPE (t) = type;
5136 TREE_OVERFLOW (t)
5137 = (TREE_OVERFLOW (arg0)
5138 | force_fit_type (t, overflow));
5139 TREE_CONSTANT_OVERFLOW (t)
5140 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5141 }
5142 }
5143 else if (TREE_CODE (arg0) == REAL_CST)
5144 {
5145 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
5146 t = build_real (type,
5147 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5148 }
5149 }
5150 else if (TREE_CODE (arg0) == ABS_EXPR || TREE_CODE (arg0) == NEGATE_EXPR)
5151 return build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
5152 return t;
5153
5154 case CONJ_EXPR:
5155 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5156 return arg0;
5157 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5158 return build (COMPLEX_EXPR, TREE_TYPE (arg0),
5159 TREE_OPERAND (arg0, 0),
5160 negate_expr (TREE_OPERAND (arg0, 1)));
5161 else if (TREE_CODE (arg0) == COMPLEX_CST)
5162 return build_complex (type, TREE_OPERAND (arg0, 0),
5163 negate_expr (TREE_OPERAND (arg0, 1)));
5164 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5165 return fold (build (TREE_CODE (arg0), type,
5166 fold (build1 (CONJ_EXPR, type,
5167 TREE_OPERAND (arg0, 0))),
5168 fold (build1 (CONJ_EXPR,
5169 type, TREE_OPERAND (arg0, 1)))));
5170 else if (TREE_CODE (arg0) == CONJ_EXPR)
5171 return TREE_OPERAND (arg0, 0);
5172 return t;
5173
5174 case BIT_NOT_EXPR:
5175 if (wins)
5176 {
5177 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
5178 ~ TREE_INT_CST_HIGH (arg0));
5179 TREE_TYPE (t) = type;
5180 force_fit_type (t, 0);
5181 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
5182 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
5183 }
5184 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
5185 return TREE_OPERAND (arg0, 0);
5186 return t;
5187
5188 case PLUS_EXPR:
5189 /* A + (-B) -> A - B */
5190 if (TREE_CODE (arg1) == NEGATE_EXPR)
5191 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5192 /* (-A) + B -> B - A */
5193 if (TREE_CODE (arg0) == NEGATE_EXPR)
5194 return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
5195 else if (! FLOAT_TYPE_P (type))
5196 {
5197 if (integer_zerop (arg1))
5198 return non_lvalue (convert (type, arg0));
5199
5200 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
5201 with a constant, and the two constants have no bits in common,
5202 we should treat this as a BIT_IOR_EXPR since this may produce more
5203 simplifications. */
5204 if (TREE_CODE (arg0) == BIT_AND_EXPR
5205 && TREE_CODE (arg1) == BIT_AND_EXPR
5206 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5207 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5208 && integer_zerop (const_binop (BIT_AND_EXPR,
5209 TREE_OPERAND (arg0, 1),
5210 TREE_OPERAND (arg1, 1), 0)))
5211 {
5212 code = BIT_IOR_EXPR;
5213 goto bit_ior;
5214 }
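/* For example (editorial note): (x & 0xf0) + (y & 0x0f) has disjoint
   masks, so no bit position can carry and the sum is rewritten as
   (x & 0xf0) | (y & 0x0f), handled by the BIT_IOR_EXPR code.  */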
5215
5216 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
5217 (plus (plus (mult) (mult)) (foo)) so that we can
5218 take advantage of the factoring cases below. */
5219 if ((TREE_CODE (arg0) == PLUS_EXPR
5220 && TREE_CODE (arg1) == MULT_EXPR)
5221 || (TREE_CODE (arg1) == PLUS_EXPR
5222 && TREE_CODE (arg0) == MULT_EXPR))
5223 {
5224 tree parg0, parg1, parg, marg;
5225
5226 if (TREE_CODE (arg0) == PLUS_EXPR)
5227 parg = arg0, marg = arg1;
5228 else
5229 parg = arg1, marg = arg0;
5230 parg0 = TREE_OPERAND (parg, 0);
5231 parg1 = TREE_OPERAND (parg, 1);
5232 STRIP_NOPS (parg0);
5233 STRIP_NOPS (parg1);
5234
5235 if (TREE_CODE (parg0) == MULT_EXPR
5236 && TREE_CODE (parg1) != MULT_EXPR)
5237 return fold (build (PLUS_EXPR, type,
5238 fold (build (PLUS_EXPR, type, parg0, marg)),
5239 parg1));
5240 if (TREE_CODE (parg0) != MULT_EXPR
5241 && TREE_CODE (parg1) == MULT_EXPR)
5242 return fold (build (PLUS_EXPR, type,
5243 fold (build (PLUS_EXPR, type, parg1, marg)),
5244 parg0));
5245 }
5246
5247 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
5248 {
5249 tree arg00, arg01, arg10, arg11;
5250 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
5251
5252 /* (A * C) + (B * C) -> (A+B) * C.
5253 We are most concerned about the case where C is a constant,
5254 but other combinations show up during loop reduction. Since
5255 it is not difficult, try all four possibilities. */
5256
5257 arg00 = TREE_OPERAND (arg0, 0);
5258 arg01 = TREE_OPERAND (arg0, 1);
5259 arg10 = TREE_OPERAND (arg1, 0);
5260 arg11 = TREE_OPERAND (arg1, 1);
5261 same = NULL_TREE;
5262
5263 if (operand_equal_p (arg01, arg11, 0))
5264 same = arg01, alt0 = arg00, alt1 = arg10;
5265 else if (operand_equal_p (arg00, arg10, 0))
5266 same = arg00, alt0 = arg01, alt1 = arg11;
5267 else if (operand_equal_p (arg00, arg11, 0))
5268 same = arg00, alt0 = arg01, alt1 = arg10;
5269 else if (operand_equal_p (arg01, arg10, 0))
5270 same = arg01, alt0 = arg00, alt1 = arg11;
5271
5272 /* No identical multiplicands; see if we can find a common
5273 power-of-two factor in non-power-of-two multiplies. This
5274 can help in multi-dimensional array access. */
5275 else if (TREE_CODE (arg01) == INTEGER_CST
5276 && TREE_CODE (arg11) == INTEGER_CST
5277 && TREE_INT_CST_HIGH (arg01) == 0
5278 && TREE_INT_CST_HIGH (arg11) == 0)
5279 {
5280 HOST_WIDE_INT int01, int11, tmp;
5281 int01 = TREE_INT_CST_LOW (arg01);
5282 int11 = TREE_INT_CST_LOW (arg11);
5283
5284 /* Move min of absolute values to int11. */
5285 if ((int01 >= 0 ? int01 : -int01)
5286 < (int11 >= 0 ? int11 : -int11))
5287 {
5288 tmp = int01, int01 = int11, int11 = tmp;
5289 alt0 = arg00, arg00 = arg10, arg10 = alt0;
5290 alt0 = arg01, arg01 = arg11, arg11 = alt0;
5291 }
5292
5293 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
5294 {
5295 alt0 = fold (build (MULT_EXPR, type, arg00,
5296 build_int_2 (int01 / int11, 0)));
5297 alt1 = arg10;
5298 same = arg11;
5299 }
5300 }
5301
5302 if (same)
5303 return fold (build (MULT_EXPR, type,
5304 fold (build (PLUS_EXPR, type, alt0, alt1)),
5305 same));
5306 }
5307 }
5308 /* In IEEE floating point, x+0 may not equal x. */
5309 else if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5310 || flag_fast_math)
5311 && real_zerop (arg1))
5312 return non_lvalue (convert (type, arg0));
5313 /* x+(-0) equals x, even for IEEE. */
5314 else if (TREE_CODE (arg1) == REAL_CST
5315 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (arg1)))
5316 return non_lvalue (convert (type, arg0));
5317
5318 bit_rotate:
5319 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
5320 is a rotate of A by C1 bits. */
5321 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
5322 is a rotate of A by B bits. */
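/* E.g. with a 32-bit unsigned A, both (A << 3) + (A >> 29)
and (A << B) + (A >> (32 - B)) become rotates of A. */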
5323 {
5324 register enum tree_code code0, code1;
5325 code0 = TREE_CODE (arg0);
5326 code1 = TREE_CODE (arg1);
5327 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
5328 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
5329 && operand_equal_p (TREE_OPERAND (arg0, 0),
5330 TREE_OPERAND (arg1, 0), 0)
5331 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5332 {
5333 register tree tree01, tree11;
5334 register enum tree_code code01, code11;
5335
5336 tree01 = TREE_OPERAND (arg0, 1);
5337 tree11 = TREE_OPERAND (arg1, 1);
5338 STRIP_NOPS (tree01);
5339 STRIP_NOPS (tree11);
5340 code01 = TREE_CODE (tree01);
5341 code11 = TREE_CODE (tree11);
5342 if (code01 == INTEGER_CST
5343 && code11 == INTEGER_CST
5344 && TREE_INT_CST_HIGH (tree01) == 0
5345 && TREE_INT_CST_HIGH (tree11) == 0
5346 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
5347 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
5348 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
5349 code0 == LSHIFT_EXPR ? tree01 : tree11);
5350 else if (code11 == MINUS_EXPR)
5351 {
5352 tree tree110, tree111;
5353 tree110 = TREE_OPERAND (tree11, 0);
5354 tree111 = TREE_OPERAND (tree11, 1);
5355 STRIP_NOPS (tree110);
5356 STRIP_NOPS (tree111);
5357 if (TREE_CODE (tree110) == INTEGER_CST
5358 && TREE_INT_CST_HIGH (tree110) == 0
5359 && (TREE_INT_CST_LOW (tree110)
5360 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5361 && operand_equal_p (tree01, tree111, 0))
5362 return build ((code0 == LSHIFT_EXPR
5363 ? LROTATE_EXPR
5364 : RROTATE_EXPR),
5365 type, TREE_OPERAND (arg0, 0), tree01);
5366 }
5367 else if (code01 == MINUS_EXPR)
5368 {
5369 tree tree010, tree011;
5370 tree010 = TREE_OPERAND (tree01, 0);
5371 tree011 = TREE_OPERAND (tree01, 1);
5372 STRIP_NOPS (tree010);
5373 STRIP_NOPS (tree011);
5374 if (TREE_CODE (tree010) == INTEGER_CST
5375 && TREE_INT_CST_HIGH (tree010) == 0
5376 && (TREE_INT_CST_LOW (tree010)
5377 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5378 && operand_equal_p (tree11, tree011, 0))
5379 return build ((code0 != LSHIFT_EXPR
5380 ? LROTATE_EXPR
5381 : RROTATE_EXPR),
5382 type, TREE_OPERAND (arg0, 0), tree11);
5383 }
5384 }
5385 }
5386
5387
5388 associate:
5389 /* In most languages, we can't reassociate operations on floats across
5390 parentheses. Rather than remember where the parentheses were, we
5391 don't associate floats at all. It shouldn't matter much. However,
5392 associating multiplications is only very slightly inaccurate, so do
5393 that if -ffast-math is specified. */
5394
5395 if (! wins
5396 && (! FLOAT_TYPE_P (type)
5397 || (flag_fast_math && code == MULT_EXPR)))
5398 {
5399 tree var0, con0, lit0, var1, con1, lit1;
5400
5401 /* Split both trees into variables, constants, and literals. Then
5402 associate each group together, the constants with literals,
5403 then the result with variables. This increases the chances of
5404 literals being recombined later and of generating relocatable
5405 expressions for the sum of a constant and literal. */
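/* For instance, (x + 3) + (y + &v), where v stands for some static
object, regroups as (x + y) + (&v + 3): the literal 3 and the
constant address combine into one relocatable operand. */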
5406 var0 = split_tree (arg0, code, &con0, &lit0, 0);
5407 var1 = split_tree (arg1, code, &con1, &lit1, code == MINUS_EXPR);
5408
5409 /* Only do something if we found more than two objects. Otherwise,
5410 nothing has changed and we risk infinite recursion. */
5411 if (2 < ((var0 != 0) + (var1 != 0) + (con0 != 0) + (con1 != 0)
5412 + (lit0 != 0) + (lit1 != 0)))
5413 {
5414 var0 = associate_trees (var0, var1, code, type);
5415 con0 = associate_trees (con0, con1, code, type);
5416 lit0 = associate_trees (lit0, lit1, code, type);
5417 con0 = associate_trees (con0, lit0, code, type);
5418 return convert (type, associate_trees (var0, con0, code, type));
5419 }
5420 }
5421
5422 binary:
5423 #if defined (REAL_IS_NOT_DOUBLE) && ! defined (REAL_ARITHMETIC)
5424 if (TREE_CODE (arg1) == REAL_CST)
5425 return t;
5426 #endif /* REAL_IS_NOT_DOUBLE, and no REAL_ARITHMETIC */
5427 if (wins)
5428 t1 = const_binop (code, arg0, arg1, 0);
5429 if (t1 != NULL_TREE)
5430 {
5431 /* The return value should always have
5432 the same type as the original expression. */
5433 if (TREE_TYPE (t1) != TREE_TYPE (t))
5434 t1 = convert (TREE_TYPE (t), t1);
5435
5436 return t1;
5437 }
5438 return t;
5439
5440 case MINUS_EXPR:
5441 /* A - (-B) -> A + B */
5442 if (TREE_CODE (arg1) == NEGATE_EXPR)
5443 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5444 /* (-A) - CST -> (-CST) - A for floating point (what about ints?). */
5445 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == REAL_CST)
5446 return
5447 fold (build (MINUS_EXPR, type,
5448 build_real (TREE_TYPE (arg1),
5449 REAL_VALUE_NEGATE (TREE_REAL_CST (arg1))),
5450 TREE_OPERAND (arg0, 0)));
5451
5452 if (! FLOAT_TYPE_P (type))
5453 {
5454 if (! wins && integer_zerop (arg0))
5455 return negate_expr (arg1);
5456 if (integer_zerop (arg1))
5457 return non_lvalue (convert (type, arg0));
5458
5459 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
5460 about the case where C is a constant, just try one of the
5461 four possibilities. */
5462
5463 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
5464 && operand_equal_p (TREE_OPERAND (arg0, 1),
5465 TREE_OPERAND (arg1, 1), 0))
5466 return fold (build (MULT_EXPR, type,
5467 fold (build (MINUS_EXPR, type,
5468 TREE_OPERAND (arg0, 0),
5469 TREE_OPERAND (arg1, 0))),
5470 TREE_OPERAND (arg0, 1)));
5471 }
5472
5473 else if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5474 || flag_fast_math)
5475 {
5476 /* Except with IEEE floating point, 0-x equals -x. */
5477 if (! wins && real_zerop (arg0))
5478 return negate_expr (arg1);
5479 /* Except with IEEE floating point, x-0 equals x. */
5480 if (real_zerop (arg1))
5481 return non_lvalue (convert (type, arg0));
5482 }
5483
5484 /* Fold &x - &x. This can happen from &x.foo - &x.
5485 This is unsafe for certain floats even in non-IEEE formats.
5486 In IEEE, it is unsafe because it does wrong for NaNs.
5487 Also note that operand_equal_p is always false if an operand
5488 is volatile. */
5489
5490 if ((! FLOAT_TYPE_P (type) || flag_fast_math)
5491 && operand_equal_p (arg0, arg1, 0))
5492 return convert (type, integer_zero_node);
5493
5494 goto associate;
5495
5496 case MULT_EXPR:
5497 /* (-A) * (-B) -> A * B */
5498 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
5499 return fold (build (MULT_EXPR, type, TREE_OPERAND (arg0, 0),
5500 TREE_OPERAND (arg1, 0)));
5501
5502 if (! FLOAT_TYPE_P (type))
5503 {
5504 if (integer_zerop (arg1))
5505 return omit_one_operand (type, arg1, arg0);
5506 if (integer_onep (arg1))
5507 return non_lvalue (convert (type, arg0));
5508
5509 /* (a * (1 << b)) is (a << b) */
5510 if (TREE_CODE (arg1) == LSHIFT_EXPR
5511 && integer_onep (TREE_OPERAND (arg1, 0)))
5512 return fold (build (LSHIFT_EXPR, type, arg0,
5513 TREE_OPERAND (arg1, 1)));
5514 if (TREE_CODE (arg0) == LSHIFT_EXPR
5515 && integer_onep (TREE_OPERAND (arg0, 0)))
5516 return fold (build (LSHIFT_EXPR, type, arg1,
5517 TREE_OPERAND (arg0, 1)));
5518
5519 if (TREE_CODE (arg1) == INTEGER_CST
5520 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
5521 code, NULL_TREE)))
5522 return convert (type, tem);
5523
5524 }
5525 else
5526 {
5527 /* x*0 is 0, except for IEEE floating point. */
5528 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5529 || flag_fast_math)
5530 && real_zerop (arg1))
5531 return omit_one_operand (type, arg1, arg0);
5532 /* In IEEE floating point, x*1 is not equivalent to x for signaling
5533 NaNs. However, ANSI C says we can drop signals,
5534 so we can do this anyway. */
5535 if (real_onep (arg1))
5536 return non_lvalue (convert (type, arg0));
5537 /* x*2 is x+x */
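/* save_expr makes ARG0 shared, so it is evaluated only once even
though it appears twice in the PLUS_EXPR built below. */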
5538 if (! wins && real_twop (arg1) && global_bindings_p () == 0
5539 && ! contains_placeholder_p (arg0))
5540 {
5541 tree arg = save_expr (arg0);
5542 return build (PLUS_EXPR, type, arg, arg);
5543 }
5544 }
5545 goto associate;
5546
5547 case BIT_IOR_EXPR:
5548 bit_ior:
5549 if (integer_all_onesp (arg1))
5550 return omit_one_operand (type, arg1, arg0);
5551 if (integer_zerop (arg1))
5552 return non_lvalue (convert (type, arg0));
5553 t1 = distribute_bit_expr (code, type, arg0, arg1);
5554 if (t1 != NULL_TREE)
5555 return t1;
5556
5557 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
5558
5559 This results in more efficient code for machines without a NAND
5560 instruction. Combine will canonicalize to the first form
5561 which will allow use of NAND instructions provided by the
5562 backend if they exist. */
5563 if (TREE_CODE (arg0) == BIT_NOT_EXPR
5564 && TREE_CODE (arg1) == BIT_NOT_EXPR)
5565 {
5566 return fold (build1 (BIT_NOT_EXPR, type,
5567 build (BIT_AND_EXPR, type,
5568 TREE_OPERAND (arg0, 0),
5569 TREE_OPERAND (arg1, 0))));
5570 }
5571
5572 /* See if this can be simplified into a rotate first. If that
5573 is unsuccessful continue in the association code. */
5574 goto bit_rotate;
5575
5576 case BIT_XOR_EXPR:
5577 if (integer_zerop (arg1))
5578 return non_lvalue (convert (type, arg0));
5579 if (integer_all_onesp (arg1))
5580 return fold (build1 (BIT_NOT_EXPR, type, arg0));
5581
5582 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
5583 with a constant, and the two constants have no bits in common,
5584 we should treat this as a BIT_IOR_EXPR since this may produce more
5585 simplifications. */
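/* E.g. (x & 4) ^ (y & 3) sets no bit in both operands, so it is
equivalent to (x & 4) | (y & 3). */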
5586 if (TREE_CODE (arg0) == BIT_AND_EXPR
5587 && TREE_CODE (arg1) == BIT_AND_EXPR
5588 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5589 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5590 && integer_zerop (const_binop (BIT_AND_EXPR,
5591 TREE_OPERAND (arg0, 1),
5592 TREE_OPERAND (arg1, 1), 0)))
5593 {
5594 code = BIT_IOR_EXPR;
5595 goto bit_ior;
5596 }
5597
5598 /* See if this can be simplified into a rotate first. If that
5599 is unsuccessful continue in the association code. */
5600 goto bit_rotate;
5601
5602 case BIT_AND_EXPR:
5603 bit_and:
5604 if (integer_all_onesp (arg1))
5605 return non_lvalue (convert (type, arg0));
5606 if (integer_zerop (arg1))
5607 return omit_one_operand (type, arg1, arg0);
5608 t1 = distribute_bit_expr (code, type, arg0, arg1);
5609 if (t1 != NULL_TREE)
5610 return t1;
5611 /* Simplify ((int) c & 0x1ff) into (int) c, if c is unsigned char. */
5612 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == NOP_EXPR
5613 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0))))
5614 {
5615 int prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)));
5616 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
5617 && (~TREE_INT_CST_LOW (arg0)
5618 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
5619 return build1 (NOP_EXPR, type, TREE_OPERAND (arg1, 0));
5620 }
5621 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
5622 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5623 {
5624 int prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
5625 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
5626 && (~TREE_INT_CST_LOW (arg1)
5627 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
5628 return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
5629 }
5630
5631 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
5632
5633 This results in more efficient code for machines without a NOR
5634 instruction. Combine will canonicalize to the first form
5635 which will allow use of NOR instructions provided by the
5636 backend if they exist. */
5637 if (TREE_CODE (arg0) == BIT_NOT_EXPR
5638 && TREE_CODE (arg1) == BIT_NOT_EXPR)
5639 {
5640 return fold (build1 (BIT_NOT_EXPR, type,
5641 build (BIT_IOR_EXPR, type,
5642 TREE_OPERAND (arg0, 0),
5643 TREE_OPERAND (arg1, 0))));
5644 }
5645
5646 goto associate;
5647
5648 case BIT_ANDTC_EXPR:
5649 if (integer_all_onesp (arg0))
5650 return fold (build1 (BIT_NOT_EXPR, type, arg1)); /* ~0 & ~B is ~B */
5651 if (integer_zerop (arg0))
5652 return omit_one_operand (type, arg0, arg1);
5653 if (TREE_CODE (arg1) == INTEGER_CST)
5654 {
5655 arg1 = fold (build1 (BIT_NOT_EXPR, type, arg1));
5656 code = BIT_AND_EXPR;
5657 goto bit_and;
5658 }
5659 goto binary;
5660
5661 case RDIV_EXPR:
5662 /* In most cases, do nothing with a divide by zero. */
5663 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
5664 #ifndef REAL_INFINITY
5665 if (TREE_CODE (arg1) == REAL_CST && real_zerop (arg1))
5666 return t;
5667 #endif
5668 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
5669
5670 /* (-A) / (-B) -> A / B */
5671 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
5672 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
5673 TREE_OPERAND (arg1, 0)));
5674
5675 /* In IEEE floating point, x/1 is not equivalent to x for signaling NaNs.
5676 However, ANSI C says we can drop signals, so we can do this anyway. */
5677 if (real_onep (arg1))
5678 return non_lvalue (convert (type, arg0));
5679
5680 /* If ARG1 is a constant, we can convert this to a multiply by the
5681 reciprocal. This does not have the same rounding properties,
5682 so only do this if -ffast-math. We can actually always safely
5683 do it if ARG1 is a power of two, but it's hard to tell if it is
5684 or not in a portable manner. */
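/* E.g. when optimizing, x / 2.0 becomes x * 0.5 because 0.5 is exactly
representable; x / 3.0 becomes x * (1.0/3.0) only under -ffast-math. */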
5685 if (TREE_CODE (arg1) == REAL_CST)
5686 {
5687 if (flag_fast_math
5688 && 0 != (tem = const_binop (code, build_real (type, dconst1),
5689 arg1, 0)))
5690 return fold (build (MULT_EXPR, type, arg0, tem));
5691 /* Find the reciprocal if optimizing and the result is exact. */
5692 else if (optimize)
5693 {
5694 REAL_VALUE_TYPE r;
5695 r = TREE_REAL_CST (arg1);
5696 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
5697 {
5698 tem = build_real (type, r);
5699 return fold (build (MULT_EXPR, type, arg0, tem));
5700 }
5701 }
5702 }
5703 goto binary;
5704
5705 case TRUNC_DIV_EXPR:
5706 case ROUND_DIV_EXPR:
5707 case FLOOR_DIV_EXPR:
5708 case CEIL_DIV_EXPR:
5709 case EXACT_DIV_EXPR:
5710 if (integer_onep (arg1))
5711 return non_lvalue (convert (type, arg0));
5712 if (integer_zerop (arg1))
5713 return t;
5714
5715 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
5716 operation, EXACT_DIV_EXPR.
5717
5718 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
5719 At one time others generated faster code; it's not clear whether they
5720 still do after the last round of changes to the DIV code in expmed.c. */
5721 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
5722 && multiple_of_p (type, arg0, arg1))
5723 return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));
5724
5725 if (TREE_CODE (arg1) == INTEGER_CST
5726 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
5727 code, NULL_TREE)))
5728 return convert (type, tem);
5729
5730 goto binary;
5731
5732 case CEIL_MOD_EXPR:
5733 case FLOOR_MOD_EXPR:
5734 case ROUND_MOD_EXPR:
5735 case TRUNC_MOD_EXPR:
5736 if (integer_onep (arg1))
5737 return omit_one_operand (type, integer_zero_node, arg0);
5738 if (integer_zerop (arg1))
5739 return t;
5740
5741 if (TREE_CODE (arg1) == INTEGER_CST
5742 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
5743 code, NULL_TREE)))
5744 return convert (type, tem);
5745
5746 goto binary;
5747
5748 case LSHIFT_EXPR:
5749 case RSHIFT_EXPR:
5750 case LROTATE_EXPR:
5751 case RROTATE_EXPR:
5752 if (integer_zerop (arg1))
5753 return non_lvalue (convert (type, arg0));
5754 /* Since a negative shift count is not well-defined,
5755 don't try to compute it in the compiler. */
5756 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
5757 return t;
5758 /* Rewrite an LROTATE_EXPR by a constant into an
5759 RROTATE_EXPR by a new constant. */
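/* E.g. in a 32-bit mode, rotating left by 3 becomes rotating
right by 29. */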
5760 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
5761 {
5762 TREE_SET_CODE (t, RROTATE_EXPR);
5763 code = RROTATE_EXPR;
5764 TREE_OPERAND (t, 1) = arg1
5765 = const_binop
5766 (MINUS_EXPR,
5767 convert (TREE_TYPE (arg1),
5768 build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0)),
5769 arg1, 0);
5770 if (tree_int_cst_sgn (arg1) < 0)
5771 return t;
5772 }
5773
5774 /* If we have a rotate of a bit operation with the rotate count and
5775 the second operand of the bit operation both constant,
5776 permute the two operations. */
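/* E.g. ((x & C) rrotate N) becomes ((x rrotate N) & (C rrotate N)),
and the rotate of the constant C folds away. */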
5777 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
5778 && (TREE_CODE (arg0) == BIT_AND_EXPR
5779 || TREE_CODE (arg0) == BIT_ANDTC_EXPR
5780 || TREE_CODE (arg0) == BIT_IOR_EXPR
5781 || TREE_CODE (arg0) == BIT_XOR_EXPR)
5782 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
5783 return fold (build (TREE_CODE (arg0), type,
5784 fold (build (code, type,
5785 TREE_OPERAND (arg0, 0), arg1)),
5786 fold (build (code, type,
5787 TREE_OPERAND (arg0, 1), arg1))));
5788
5789 /* Two consecutive rotates adding up to the width of the mode can
5790 be ignored. */
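/* E.g. ((x rrotate 5) rrotate 27) is just x in a 32-bit mode. */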
5791 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
5792 && TREE_CODE (arg0) == RROTATE_EXPR
5793 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5794 && TREE_INT_CST_HIGH (arg1) == 0
5795 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
5796 && ((TREE_INT_CST_LOW (arg1)
5797 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
5798 == GET_MODE_BITSIZE (TYPE_MODE (type))))
5799 return TREE_OPERAND (arg0, 0);
5800
5801 goto binary;
5802
5803 case MIN_EXPR:
5804 if (operand_equal_p (arg0, arg1, 0))
5805 return arg0;
5806 if (INTEGRAL_TYPE_P (type)
5807 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
5808 return omit_one_operand (type, arg1, arg0);
5809 goto associate;
5810
5811 case MAX_EXPR:
5812 if (operand_equal_p (arg0, arg1, 0))
5813 return arg0;
5814 if (INTEGRAL_TYPE_P (type)
5815 && TYPE_MAX_VALUE (type)
5816 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
5817 return omit_one_operand (type, arg1, arg0);
5818 goto associate;
5819
5820 case TRUTH_NOT_EXPR:
5821 /* Note that the operand of this must be an int
5822 and its values must be 0 or 1.
5823 ("true" is a fixed value perhaps depending on the language,
5824 but we don't handle values other than 1 correctly yet.) */
5825 tem = invert_truthvalue (arg0);
5826 /* Avoid infinite recursion. */
5827 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
5828 return t;
5829 return convert (type, tem);
5830
5831 case TRUTH_ANDIF_EXPR:
5832 /* Note that the operands of this must be ints
5833 and their values must be 0 or 1.
5834 ("true" is a fixed value perhaps depending on the language.) */
5835 /* If first arg is constant zero, return it. */
5836 if (integer_zerop (arg0))
5837 return arg0;
5838 case TRUTH_AND_EXPR:
5839 /* If either arg is constant true, drop it. */
5840 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
5841 return non_lvalue (arg1);
5842 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
5843 return non_lvalue (arg0);
5844 /* If second arg is constant zero, result is zero, but first arg
5845 must be evaluated. */
5846 if (integer_zerop (arg1))
5847 return omit_one_operand (type, arg1, arg0);
5848 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
5849 case will be handled here. */
5850 if (integer_zerop (arg0))
5851 return omit_one_operand (type, arg0, arg1);
5852
5853 truth_andor:
5854 /* We only do these simplifications if we are optimizing. */
5855 if (!optimize)
5856 return t;
5857
5858 /* Check for things like (A || B) && (A || C). We can convert this
5859 to A || (B && C). Note that either operator can be any of the four
5860 truth and/or operations and the transformation will still be
5861 valid. Also note that we only care about order for the
5862 ANDIF and ORIF operators. If B contains side effects, this
5863 might change the truth-value of A. */
5864 if (TREE_CODE (arg0) == TREE_CODE (arg1)
5865 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
5866 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
5867 || TREE_CODE (arg0) == TRUTH_AND_EXPR
5868 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
5869 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
5870 {
5871 tree a00 = TREE_OPERAND (arg0, 0);
5872 tree a01 = TREE_OPERAND (arg0, 1);
5873 tree a10 = TREE_OPERAND (arg1, 0);
5874 tree a11 = TREE_OPERAND (arg1, 1);
5875 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
5876 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
5877 && (code == TRUTH_AND_EXPR
5878 || code == TRUTH_OR_EXPR));
5879
5880 if (operand_equal_p (a00, a10, 0))
5881 return fold (build (TREE_CODE (arg0), type, a00,
5882 fold (build (code, type, a01, a11))));
5883 else if (commutative && operand_equal_p (a00, a11, 0))
5884 return fold (build (TREE_CODE (arg0), type, a00,
5885 fold (build (code, type, a01, a10))));
5886 else if (commutative && operand_equal_p (a01, a10, 0))
5887 return fold (build (TREE_CODE (arg0), type, a01,
5888 fold (build (code, type, a00, a11))));
5889
5890 /* This case is tricky because we must either have commutative
5891 operators or else A10 must not have side effects. */
5892
5893 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
5894 && operand_equal_p (a01, a11, 0))
5895 return fold (build (TREE_CODE (arg0), type,
5896 fold (build (code, type, a00, a10)),
5897 a01));
5898 }
5899
5900 /* See if we can build a range comparison. */
5901 if (0 != (tem = fold_range_test (t)))
5902 return tem;
5903
5904 /* Check for the possibility of merging component references. If our
5905 lhs is another similar operation, try to merge its rhs with our
5906 rhs. Then try to merge our lhs and rhs. */
5907 if (TREE_CODE (arg0) == code
5908 && 0 != (tem = fold_truthop (code, type,
5909 TREE_OPERAND (arg0, 1), arg1)))
5910 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
5911
5912 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
5913 return tem;
5914
5915 return t;
5916
5917 case TRUTH_ORIF_EXPR:
5918 /* Note that the operands of this must be ints
5919 and their values must be 0 or 1.
5920 ("true" is a fixed value perhaps depending on the language.) */
5921 /* If first arg is constant true, return it. */
5922 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
5923 return arg0;
5924 case TRUTH_OR_EXPR:
5925 /* If either arg is constant zero, drop it. */
5926 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
5927 return non_lvalue (arg1);
5928 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1))
5929 return non_lvalue (arg0);
5930 /* If second arg is constant true, result is true, but we must
5931 evaluate first arg. */
5932 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
5933 return omit_one_operand (type, arg1, arg0);
5934 /* Likewise for first arg, but note this only occurs here for
5935 TRUTH_OR_EXPR. */
5936 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
5937 return omit_one_operand (type, arg0, arg1);
5938 goto truth_andor;
5939
5940 case TRUTH_XOR_EXPR:
5941 /* If either arg is constant zero, drop it. */
5942 if (integer_zerop (arg0))
5943 return non_lvalue (arg1);
5944 if (integer_zerop (arg1))
5945 return non_lvalue (arg0);
5946 /* If either arg is constant true, this is a logical inversion. */
5947 if (integer_onep (arg0))
5948 return non_lvalue (invert_truthvalue (arg1));
5949 if (integer_onep (arg1))
5950 return non_lvalue (invert_truthvalue (arg0));
5951 return t;
5952
5953 case EQ_EXPR:
5954 case NE_EXPR:
5955 case LT_EXPR:
5956 case GT_EXPR:
5957 case LE_EXPR:
5958 case GE_EXPR:
5959 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
5960 {
5961 /* (-a) CMP (-b) -> b CMP a */
5962 if (TREE_CODE (arg0) == NEGATE_EXPR
5963 && TREE_CODE (arg1) == NEGATE_EXPR)
5964 return fold (build (code, type, TREE_OPERAND (arg1, 0),
5965 TREE_OPERAND (arg0, 0)));
5966 /* (-a) CMP CST -> a swap(CMP) (-CST) */
5967 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == REAL_CST)
5968 return
5969 fold (build
5970 (swap_tree_comparison (code), type,
5971 TREE_OPERAND (arg0, 0),
5972 build_real (TREE_TYPE (arg1),
5973 REAL_VALUE_NEGATE (TREE_REAL_CST (arg1)))));
5974 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
5975 /* a CMP (-0) -> a CMP 0 */
5976 if (TREE_CODE (arg1) == REAL_CST
5977 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (arg1)))
5978 return fold (build (code, type, arg0,
5979 build_real (TREE_TYPE (arg1), dconst0)));
5980 }
5981
5982
5983 /* If one arg is a constant integer, put it last. */
5984 if (TREE_CODE (arg0) == INTEGER_CST
5985 && TREE_CODE (arg1) != INTEGER_CST)
5986 {
5987 TREE_OPERAND (t, 0) = arg1;
5988 TREE_OPERAND (t, 1) = arg0;
5989 arg0 = TREE_OPERAND (t, 0);
5990 arg1 = TREE_OPERAND (t, 1);
5991 code = swap_tree_comparison (code);
5992 TREE_SET_CODE (t, code);
5993 }
5994
5995 /* Convert foo++ == CONST into ++foo == CONST + INCR.
5996 First, see if one arg is constant; find the constant arg
5997 and the other one. */
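/* E.g. i++ == 5 becomes ++i == 6, which saves a temporary
holding the old value of i. */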
5998 {
5999 tree constop = 0, varop = NULL_TREE;
6000 int constopnum = -1;
6001
6002 if (TREE_CONSTANT (arg1))
6003 constopnum = 1, constop = arg1, varop = arg0;
6004 if (TREE_CONSTANT (arg0))
6005 constopnum = 0, constop = arg0, varop = arg1;
6006
6007 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
6008 {
6009 /* This optimization is invalid for ordered comparisons
6010 if CONST+INCR overflows or if foo+incr might overflow.
6011 This optimization is invalid for floating point due to rounding.
6012 For pointer types we assume overflow doesn't happen. */
6013 if (POINTER_TYPE_P (TREE_TYPE (varop))
6014 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
6015 && (code == EQ_EXPR || code == NE_EXPR)))
6016 {
6017 tree newconst
6018 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
6019 constop, TREE_OPERAND (varop, 1)));
6020 TREE_SET_CODE (varop, PREINCREMENT_EXPR);
6021
6022 /* If VAROP is a reference to a bitfield, we must mask
6023 the constant by the width of the field. */
6024 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
6025 && DECL_BIT_FIELD (TREE_OPERAND
6026 (TREE_OPERAND (varop, 0), 1)))
6027 {
6028 int size
6029 = TREE_INT_CST_LOW (DECL_SIZE
6030 (TREE_OPERAND
6031 (TREE_OPERAND (varop, 0), 1)));
6032 tree mask, unsigned_type;
6033 int precision;
6034 tree folded_compare;
6035
6036 /* First check whether the comparison always comes out
6037 the same. If we didn't check, the masking below
6038 could change its meaning. */
6039 if (constopnum == 0)
6040 folded_compare = fold (build (code, type, constop,
6041 TREE_OPERAND (varop, 0)));
6042 else
6043 folded_compare = fold (build (code, type,
6044 TREE_OPERAND (varop, 0),
6045 constop));
6046 if (integer_zerop (folded_compare)
6047 || integer_onep (folded_compare))
6048 return omit_one_operand (type, folded_compare, varop);
6049
6050 unsigned_type = type_for_size (size, 1);
6051 precision = TYPE_PRECISION (unsigned_type);
6052 mask = build_int_2 (~0, ~0);
6053 TREE_TYPE (mask) = unsigned_type;
6054 force_fit_type (mask, 0);
6055 mask = const_binop (RSHIFT_EXPR, mask,
6056 size_int (precision - size), 0);
6057 newconst = fold (build (BIT_AND_EXPR,
6058 TREE_TYPE (varop), newconst,
6059 convert (TREE_TYPE (varop),
6060 mask)));
6061 }
6062
6063
6064 t = build (code, type, TREE_OPERAND (t, 0),
6065 TREE_OPERAND (t, 1));
6066 TREE_OPERAND (t, constopnum) = newconst;
6067 return t;
6068 }
6069 }
6070 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
6071 {
6072 if (POINTER_TYPE_P (TREE_TYPE (varop))
6073 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
6074 && (code == EQ_EXPR || code == NE_EXPR)))
6075 {
6076 tree newconst
6077 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
6078 constop, TREE_OPERAND (varop, 1)));
6079 TREE_SET_CODE (varop, PREDECREMENT_EXPR);
6080
6081 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
6082 && DECL_BIT_FIELD (TREE_OPERAND
6083 (TREE_OPERAND (varop, 0), 1)))
6084 {
6085 int size
6086 = TREE_INT_CST_LOW (DECL_SIZE
6087 (TREE_OPERAND
6088 (TREE_OPERAND (varop, 0), 1)));
6089 tree mask, unsigned_type;
6090 int precision;
6091 tree folded_compare;
6092
6093 if (constopnum == 0)
6094 folded_compare = fold (build (code, type, constop,
6095 TREE_OPERAND (varop, 0)));
6096 else
6097 folded_compare = fold (build (code, type,
6098 TREE_OPERAND (varop, 0),
6099 constop));
6100 if (integer_zerop (folded_compare)
6101 || integer_onep (folded_compare))
6102 return omit_one_operand (type, folded_compare, varop);
6103
6104 unsigned_type = type_for_size (size, 1);
6105 precision = TYPE_PRECISION (unsigned_type);
6106 mask = build_int_2 (~0, ~0);
6107 TREE_TYPE (mask) = unsigned_type;
6108 force_fit_type (mask, 0);
6109 mask = const_binop (RSHIFT_EXPR, mask,
6110 size_int (precision - size), 0);
6111 newconst = fold (build (BIT_AND_EXPR,
6112 TREE_TYPE (varop), newconst,
6113 convert (TREE_TYPE (varop),
6114 mask)));
6115 }
6116
6117
6118 t = build (code, type, TREE_OPERAND (t, 0),
6119 TREE_OPERAND (t, 1));
6120 TREE_OPERAND (t, constopnum) = newconst;
6121 return t;
6122 }
6123 }
6124 }
6125
6126 /* Change X >= CST to X > (CST - 1) if CST is positive. */
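/* E.g. x >= 5 becomes x > 4, and x < 5 becomes x <= 4. */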
6127 if (TREE_CODE (arg1) == INTEGER_CST
6128 && TREE_CODE (arg0) != INTEGER_CST
6129 && tree_int_cst_sgn (arg1) > 0)
6130 {
6131 switch (TREE_CODE (t))
6132 {
6133 case GE_EXPR:
6134 code = GT_EXPR;
6135 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6136 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6137 break;
6138
6139 case LT_EXPR:
6140 code = LE_EXPR;
6141 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6142 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6143 break;
6144
6145 default:
6146 break;
6147 }
6148 }
6149
6150 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
6151 a MINUS_EXPR of a constant, we can convert it into a comparison with
6152 a revised constant as long as no overflow occurs. */
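/* E.g. x + 3 == 7 becomes x == 4, and x - 3 == 7 becomes x == 10,
provided the adjusted constant does not overflow. */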
6153 if ((code == EQ_EXPR || code == NE_EXPR)
6154 && TREE_CODE (arg1) == INTEGER_CST
6155 && (TREE_CODE (arg0) == PLUS_EXPR
6156 || TREE_CODE (arg0) == MINUS_EXPR)
6157 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6158 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
6159 ? MINUS_EXPR : PLUS_EXPR,
6160 arg1, TREE_OPERAND (arg0, 1), 0))
6161 && ! TREE_CONSTANT_OVERFLOW (tem))
6162 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6163
6164 /* Similarly for a NEGATE_EXPR. */
6165 else if ((code == EQ_EXPR || code == NE_EXPR)
6166 && TREE_CODE (arg0) == NEGATE_EXPR
6167 && TREE_CODE (arg1) == INTEGER_CST
6168 && 0 != (tem = negate_expr (arg1))
6169 && TREE_CODE (tem) == INTEGER_CST
6170 && ! TREE_CONSTANT_OVERFLOW (tem))
6171 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6172
6173 /* If we have X - Y == 0, we can convert that to X == Y and similarly
6174 for !=. Don't do this for ordered comparisons due to overflow. */
6175 else if ((code == NE_EXPR || code == EQ_EXPR)
6176 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
6177 return fold (build (code, type,
6178 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
6179
6180 /* If we are widening one operand of an integer comparison,
6181 see if the other operand is similarly being widened. Perhaps we
6182 can do the comparison in the narrower type. */
6183 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
6184 && TREE_CODE (arg0) == NOP_EXPR
6185 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
6186 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
6187 && (TREE_TYPE (t1) == TREE_TYPE (tem)
6188 || (TREE_CODE (t1) == INTEGER_CST
6189 && int_fits_type_p (t1, TREE_TYPE (tem)))))
6190 return fold (build (code, type, tem, convert (TREE_TYPE (tem), t1)));
6191
6192 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
6193 constant, we can simplify it. */
6194 else if (TREE_CODE (arg1) == INTEGER_CST
6195 && (TREE_CODE (arg0) == MIN_EXPR
6196 || TREE_CODE (arg0) == MAX_EXPR)
6197 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
6198 return optimize_minmax_comparison (t);
6199
6200 /* If we are comparing an ABS_EXPR with a constant, we can
6201 convert all the cases into explicit comparisons, but they may
6202 well not be faster than doing the ABS and one comparison.
6203 But ABS (X) <= C is a range comparison, which becomes a subtraction
6204 and a comparison, and is probably faster. */
6205 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6206 && TREE_CODE (arg0) == ABS_EXPR
6207 && ! TREE_SIDE_EFFECTS (arg0)
6208 && (0 != (tem = negate_expr (arg1)))
6209 && TREE_CODE (tem) == INTEGER_CST
6210 && ! TREE_CONSTANT_OVERFLOW (tem))
6211 return fold (build (TRUTH_ANDIF_EXPR, type,
6212 build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
6213 build (LE_EXPR, type,
6214 TREE_OPERAND (arg0, 0), arg1)));
6215
6216 /* If this is an EQ or NE comparison with zero and ARG0 is
6217 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
6218 two operations, but the latter can be done in one less insn
6219 on machines that have only two-operand insns or on which a
6220 constant cannot be the first operand. */
6221 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
6222 && TREE_CODE (arg0) == BIT_AND_EXPR)
6223 {
6224 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
6225 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
6226 return
6227 fold (build (code, type,
6228 build (BIT_AND_EXPR, TREE_TYPE (arg0),
6229 build (RSHIFT_EXPR,
6230 TREE_TYPE (TREE_OPERAND (arg0, 0)),
6231 TREE_OPERAND (arg0, 1),
6232 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
6233 convert (TREE_TYPE (arg0),
6234 integer_one_node)),
6235 arg1));
6236 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
6237 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
6238 return
6239 fold (build (code, type,
6240 build (BIT_AND_EXPR, TREE_TYPE (arg0),
6241 build (RSHIFT_EXPR,
6242 TREE_TYPE (TREE_OPERAND (arg0, 1)),
6243 TREE_OPERAND (arg0, 0),
6244 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
6245 convert (TREE_TYPE (arg0),
6246 integer_one_node)),
6247 arg1));
6248 }
6249
6250 /* If this is an NE or EQ comparison of zero against the result of a
6251 signed MOD operation whose second operand is a power of 2, make
6252 the MOD operation unsigned since it is simpler and equivalent. */
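/* E.g. for signed x, x % 4 == 0 becomes (unsigned) x % 4U == 0;
the unsigned remainder needs no sign correction. */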
6253 if ((code == NE_EXPR || code == EQ_EXPR)
6254 && integer_zerop (arg1)
6255 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
6256 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
6257 || TREE_CODE (arg0) == CEIL_MOD_EXPR
6258 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
6259 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
6260 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6261 {
6262 tree newtype = unsigned_type (TREE_TYPE (arg0));
6263 tree newmod = build (TREE_CODE (arg0), newtype,
6264 convert (newtype, TREE_OPERAND (arg0, 0)),
6265 convert (newtype, TREE_OPERAND (arg0, 1)));
6266
6267 return build (code, type, newmod, convert (newtype, arg1));
6268 }
6269
6270 /* If this is an NE comparison of zero with an AND of one, remove the
6271 comparison since the AND will give the correct value. */
6272 if (code == NE_EXPR && integer_zerop (arg1)
6273 && TREE_CODE (arg0) == BIT_AND_EXPR
6274 && integer_onep (TREE_OPERAND (arg0, 1)))
6275 return convert (type, arg0);
6276
6277 /* If we have (A & C) == C where C is a power of 2, convert this into
6278 (A & C) != 0. Similarly for NE_EXPR. */
6279 if ((code == EQ_EXPR || code == NE_EXPR)
6280 && TREE_CODE (arg0) == BIT_AND_EXPR
6281 && integer_pow2p (TREE_OPERAND (arg0, 1))
6282 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
6283 return build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
6284 arg0, integer_zero_node);
6285
6286 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
6287 and similarly for >= into !=. */
6288 if ((code == LT_EXPR || code == GE_EXPR)
6289 && TREE_UNSIGNED (TREE_TYPE (arg0))
6290 && TREE_CODE (arg1) == LSHIFT_EXPR
6291 && integer_onep (TREE_OPERAND (arg1, 0)))
6292 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
6293 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
6294 TREE_OPERAND (arg1, 1)),
6295 convert (TREE_TYPE (arg0), integer_zero_node));
6296
6297 else if ((code == LT_EXPR || code == GE_EXPR)
6298 && TREE_UNSIGNED (TREE_TYPE (arg0))
6299 && (TREE_CODE (arg1) == NOP_EXPR
6300 || TREE_CODE (arg1) == CONVERT_EXPR)
6301 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
6302 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
6303 return
6304 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
6305 convert (TREE_TYPE (arg0),
6306 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
6307 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
6308 convert (TREE_TYPE (arg0), integer_zero_node));
6309
6310 /* Simplify comparison of something with itself. (For IEEE
6311 floating-point, we can only do some of these simplifications.) */
6312 if (operand_equal_p (arg0, arg1, 0))
6313 {
6314 switch (code)
6315 {
6316 case EQ_EXPR:
6317 case GE_EXPR:
6318 case LE_EXPR:
6319 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6320 return constant_boolean_node (1, type);
6321 code = EQ_EXPR;
6322 TREE_SET_CODE (t, code);
6323 break;
6324
6325 case NE_EXPR:
6326 /* For NE, we can only do this simplification if integer. */
6327 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6328 break;
6329 /* ... fall through ... */
6330 case GT_EXPR:
6331 case LT_EXPR:
6332 return constant_boolean_node (0, type);
6333 default:
6334 abort ();
6335 }
6336 }
6337
6338 /* An unsigned comparison against 0 can be simplified. */
6339 if (integer_zerop (arg1)
6340 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
6341 || POINTER_TYPE_P (TREE_TYPE (arg1)))
6342 && TREE_UNSIGNED (TREE_TYPE (arg1)))
6343 {
6344 switch (TREE_CODE (t))
6345 {
6346 case GT_EXPR:
6347 code = NE_EXPR;
6348 TREE_SET_CODE (t, NE_EXPR);
6349 break;
6350 case LE_EXPR:
6351 code = EQ_EXPR;
6352 TREE_SET_CODE (t, EQ_EXPR);
6353 break;
6354 case GE_EXPR:
6355 return omit_one_operand (type,
6356 convert (type, integer_one_node),
6357 arg0);
6358 case LT_EXPR:
6359 return omit_one_operand (type,
6360 convert (type, integer_zero_node),
6361 arg0);
6362 default:
6363 break;
6364 }
6365 }
6366
6367 /* Comparisons with the highest or lowest possible integer of
6368 the specified size will have known values and an unsigned
6369 <= 0x7fffffff can be simplified. */
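/* E.g. with 32-bit operands, (unsigned) x <= 0x7fffffff becomes
(int) x >= 0, and (unsigned) x > 0x7fffffff becomes (int) x < 0. */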
6370 {
6371 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
6372
6373 if (TREE_CODE (arg1) == INTEGER_CST
6374 && ! TREE_CONSTANT_OVERFLOW (arg1)
6375 && width <= HOST_BITS_PER_WIDE_INT
6376 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
6377 || POINTER_TYPE_P (TREE_TYPE (arg1))))
6378 {
6379 if (TREE_INT_CST_HIGH (arg1) == 0
6380 && (TREE_INT_CST_LOW (arg1)
6381 == ((HOST_WIDE_INT) 1 << (width - 1)) - 1)
6382 && ! TREE_UNSIGNED (TREE_TYPE (arg1)))
6383 switch (TREE_CODE (t))
6384 {
6385 case GT_EXPR:
6386 return omit_one_operand (type,
6387 convert (type, integer_zero_node),
6388 arg0);
6389 case GE_EXPR:
6390 TREE_SET_CODE (t, EQ_EXPR);
6391 break;
6392
6393 case LE_EXPR:
6394 return omit_one_operand (type,
6395 convert (type, integer_one_node),
6396 arg0);
6397 case LT_EXPR:
6398 TREE_SET_CODE (t, NE_EXPR);
6399 break;
6400
6401 default:
6402 break;
6403 }
6404
6405 else if (TREE_INT_CST_HIGH (arg1) == -1
6406 && (- TREE_INT_CST_LOW (arg1)
6407 == ((HOST_WIDE_INT) 1 << (width - 1)))
6408 && ! TREE_UNSIGNED (TREE_TYPE (arg1)))
6409 switch (TREE_CODE (t))
6410 {
6411 case LT_EXPR:
6412 return omit_one_operand (type,
6413 convert (type, integer_zero_node),
6414 arg0);
6415 case LE_EXPR:
6416 TREE_SET_CODE (t, EQ_EXPR);
6417 break;
6418
6419 case GE_EXPR:
6420 return omit_one_operand (type,
6421 convert (type, integer_one_node),
6422 arg0);
6423 case GT_EXPR:
6424 TREE_SET_CODE (t, NE_EXPR);
6425 break;
6426
6427 default:
6428 break;
6429 }
6430
6431 else if (TREE_INT_CST_HIGH (arg1) == 0
6432 && (TREE_INT_CST_LOW (arg1)
6433 == ((HOST_WIDE_INT) 1 << (width - 1)) - 1)
6434 && TREE_UNSIGNED (TREE_TYPE (arg1)))
6435
6436 switch (TREE_CODE (t))
6437 {
6438 case LE_EXPR:
6439 return fold (build (GE_EXPR, type,
6440 convert (signed_type (TREE_TYPE (arg0)),
6441 arg0),
6442 convert (signed_type (TREE_TYPE (arg1)),
6443 integer_zero_node)));
6444 case GT_EXPR:
6445 return fold (build (LT_EXPR, type,
6446 convert (signed_type (TREE_TYPE (arg0)),
6447 arg0),
6448 convert (signed_type (TREE_TYPE (arg1)),
6449 integer_zero_node)));
6450
6451 default:
6452 break;
6453 }
6454 }
6455 }
6456
6457 /* If we are comparing an expression that just has comparisons
6458 of two integer values, arithmetic expressions of those comparisons,
6459 and constants, we can simplify it. There are only three cases
6460 to check: the two values can either be equal, the first can be
6461 greater, or the second can be greater. Fold the expression for
6462 those three values. Since each value must be 0 or 1, we have
6463 eight possibilities, each of which corresponds to the constant 0
6464 or 1 or one of the six possible comparisons.
6465
6466 This handles common cases like (a > b) == 0 but also handles
6467 expressions like ((x > y) - (y > x)) > 0, which supposedly
6468 occur in macroized code. */
6469
6470 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
6471 {
6472 tree cval1 = 0, cval2 = 0;
6473 int save_p = 0;
6474
6475 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
6476 /* Don't handle degenerate cases here; they should already
6477 have been handled anyway. */
6478 && cval1 != 0 && cval2 != 0
6479 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
6480 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
6481 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
6482 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
6483 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
6484 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
6485 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
6486 {
6487 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
6488 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
6489
6490 /* We can't just pass T to eval_subst in case cval1 or cval2
6491 was the same as ARG1. */
6492
6493 tree high_result
6494 = fold (build (code, type,
6495 eval_subst (arg0, cval1, maxval, cval2, minval),
6496 arg1));
6497 tree equal_result
6498 = fold (build (code, type,
6499 eval_subst (arg0, cval1, maxval, cval2, maxval),
6500 arg1));
6501 tree low_result
6502 = fold (build (code, type,
6503 eval_subst (arg0, cval1, minval, cval2, maxval),
6504 arg1));
6505
6506 /* All three of these results should be 0 or 1. Confirm they
6507 are. Then use those values to select the proper code
6508 to use. */
6509
6510 if ((integer_zerop (high_result)
6511 || integer_onep (high_result))
6512 && (integer_zerop (equal_result)
6513 || integer_onep (equal_result))
6514 && (integer_zerop (low_result)
6515 || integer_onep (low_result)))
6516 {
6517 /* Make a 3-bit mask with the high-order bit being the
6518 value for `>', the next for `=', and the low for `<'. */
6519 switch ((integer_onep (high_result) * 4)
6520 + (integer_onep (equal_result) * 2)
6521 + integer_onep (low_result))
6522 {
6523 case 0:
6524 /* Always false. */
6525 return omit_one_operand (type, integer_zero_node, arg0);
6526 case 1:
6527 code = LT_EXPR;
6528 break;
6529 case 2:
6530 code = EQ_EXPR;
6531 break;
6532 case 3:
6533 code = LE_EXPR;
6534 break;
6535 case 4:
6536 code = GT_EXPR;
6537 break;
6538 case 5:
6539 code = NE_EXPR;
6540 break;
6541 case 6:
6542 code = GE_EXPR;
6543 break;
6544 case 7:
6545 /* Always true. */
6546 return omit_one_operand (type, integer_one_node, arg0);
6547 }
6548
6549 t = build (code, type, cval1, cval2);
6550 if (save_p)
6551 return save_expr (t);
6552 else
6553 return fold (t);
6554 }
6555 }
6556 }
6557
6558 /* If this is a comparison of a field, we may be able to simplify it. */
6559 if ((TREE_CODE (arg0) == COMPONENT_REF
6560 || TREE_CODE (arg0) == BIT_FIELD_REF)
6561 && (code == EQ_EXPR || code == NE_EXPR)
6562 /* Handle the constant case even without -O
6563 to make sure the warnings are given. */
6564 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
6565 {
6566 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
6567 return t1 ? t1 : t;
6568 }
6569
6570 /* If this is a comparison of complex values and either or both sides
6571 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
6572 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
6573 This may prevent needless evaluations. */
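/* E.g. a == b on complex operands becomes
realpart (a) == realpart (b) && imagpart (a) == imagpart (b),
and a != b uses || instead. */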
6574 if ((code == EQ_EXPR || code == NE_EXPR)
6575 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
6576 && (TREE_CODE (arg0) == COMPLEX_EXPR
6577 || TREE_CODE (arg1) == COMPLEX_EXPR
6578 || TREE_CODE (arg0) == COMPLEX_CST
6579 || TREE_CODE (arg1) == COMPLEX_CST))
6580 {
6581 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
6582 tree real0, imag0, real1, imag1;
6583
6584 arg0 = save_expr (arg0);
6585 arg1 = save_expr (arg1);
6586 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
6587 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
6588 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
6589 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
6590
6591 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
6592 : TRUTH_ORIF_EXPR),
6593 type,
6594 fold (build (code, type, real0, real1)),
6595 fold (build (code, type, imag0, imag1))));
6596 }
6597
6598 /* From here on, the only cases we handle are when the result is
6599 known to be a constant.
6600
6601 To compute GT, swap the arguments and do LT.
6602 To compute GE, do LT and invert the result.
6603 To compute LE, swap the arguments, do LT and invert the result.
6604 To compute NE, do EQ and invert the result.
6605
6606 Therefore, the code below must handle only EQ and LT. */
6607
6608 if (code == LE_EXPR || code == GT_EXPR)
6609 {
6610 tem = arg0, arg0 = arg1, arg1 = tem;
6611 code = swap_tree_comparison (code);
6612 }
6613
6614 /* Note that it is safe to invert for real values here because we
6615 will check below in the one case that it matters. */
6616
6617 t1 = NULL_TREE;
6618 invert = 0;
6619 if (code == NE_EXPR || code == GE_EXPR)
6620 {
6621 invert = 1;
6622 code = invert_tree_comparison (code);
6623 }
6624
6625 /* Compute a result for LT or EQ if args permit;
6626 otherwise return T. */
6627 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
6628 {
6629 if (code == EQ_EXPR)
6630 t1 = build_int_2 ((TREE_INT_CST_LOW (arg0)
6631 == TREE_INT_CST_LOW (arg1))
6632 && (TREE_INT_CST_HIGH (arg0)
6633 == TREE_INT_CST_HIGH (arg1)),
6634 0);
6635 else
6636 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
6637 ? INT_CST_LT_UNSIGNED (arg0, arg1)
6638 : INT_CST_LT (arg0, arg1)),
6639 0);
6640 }
6641
6642 #if 0 /* This is no longer useful, and it breaks some real code. */
6643 /* Assume a nonexplicit constant cannot equal an explicit one,
6644 since such code would be undefined anyway.
6645 Exception: on sysvr4, using #pragma weak,
6646 a label can come out as 0. */
6647 else if (TREE_CODE (arg1) == INTEGER_CST
6648 && !integer_zerop (arg1)
6649 && TREE_CONSTANT (arg0)
6650 && TREE_CODE (arg0) == ADDR_EXPR
6651 && code == EQ_EXPR)
6652 t1 = build_int_2 (0, 0);
6653 #endif
6654 /* Two real constants can be compared explicitly. */
6655 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
6656 {
6657 /* If either operand is a NaN, the result is false with two
6658 exceptions: First, an NE_EXPR is true on NaNs, but that case
6659 is already handled correctly since we will be inverting the
6660 result for NE_EXPR. Second, if we had inverted a LE_EXPR
6661 or a GE_EXPR into a LT_EXPR, we must return true so that it
6662 will be inverted into false. */
6663
6664 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
6665 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
6666 t1 = build_int_2 (invert && code == LT_EXPR, 0);
6667
6668 else if (code == EQ_EXPR)
6669 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
6670 TREE_REAL_CST (arg1)),
6671 0);
6672 else
6673 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
6674 TREE_REAL_CST (arg1)),
6675 0);
6676 }
6677
6678 if (t1 == NULL_TREE)
6679 return t;
6680
6681 if (invert)
6682 TREE_INT_CST_LOW (t1) ^= 1;
6683
6684 TREE_TYPE (t1) = type;
6685 if (TREE_CODE (type) == BOOLEAN_TYPE)
6686 return truthvalue_conversion (t1);
6687 return t1;
6688
6689 case COND_EXPR:
6690 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
6691 so all simple results must be passed through pedantic_non_lvalue. */
6692 if (TREE_CODE (arg0) == INTEGER_CST)
6693 return pedantic_non_lvalue
6694 (TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
6695 else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
6696 return pedantic_omit_one_operand (type, arg1, arg0);
6697
6698 /* If the second operand is zero, invert the comparison and swap
6699 the second and third operands. Likewise if the second operand
6700 is constant and the third is not or if the third operand is
6701 equivalent to the first operand of the comparison. */
6702
6703 if (integer_zerop (arg1)
6704 || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
6705 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
6706 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
6707 TREE_OPERAND (t, 2),
6708 TREE_OPERAND (arg0, 1))))
6709 {
6710 /* See if this can be inverted. If it can't, possibly because
6711 it was a floating-point inequality comparison, don't do
6712 anything. */
6713 tem = invert_truthvalue (arg0);
6714
6715 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
6716 {
6717 t = build (code, type, tem,
6718 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
6719 arg0 = tem;
6720 /* arg1 should now be the THEN arm, operand 1 of the new T. */
6721 arg1 = TREE_OPERAND (t, 1);
6722 STRIP_NOPS (arg1);
6723 }
6724 }
6725
6726 /* If we have A op B ? A : C, we may be able to convert this to a
6727 simpler expression, depending on the operation and the values
6728 of B and C. IEEE floating point prevents this though,
6729 because A or B might be -0.0 or a NaN. */
6730
6731 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
6732 && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
6733 || ! FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0)))
6734 || flag_fast_math)
6735 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
6736 arg1, TREE_OPERAND (arg0, 1)))
6737 {
6738 tree arg2 = TREE_OPERAND (t, 2);
6739 enum tree_code comp_code = TREE_CODE (arg0);
6740
6741 STRIP_NOPS (arg2);
6742
6743 /* If we have A op 0 ? A : -A, this is A, -A, abs (A), or abs (-A),
6744 depending on the comparison operation. */
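/* E.g. x >= 0 ? x : -x folds to abs (x), and
x <= 0 ? x : -x folds to -abs (x). */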
6745 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
6746 ? real_zerop (TREE_OPERAND (arg0, 1))
6747 : integer_zerop (TREE_OPERAND (arg0, 1)))
6748 && TREE_CODE (arg2) == NEGATE_EXPR
6749 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
6750 switch (comp_code)
6751 {
6752 case EQ_EXPR:
6753 return pedantic_non_lvalue (negate_expr (arg1));
6754 case NE_EXPR:
6755 return pedantic_non_lvalue (convert (type, arg1));
6756 case GE_EXPR:
6757 case GT_EXPR:
6758 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
6759 arg1 = convert (signed_type (TREE_TYPE (arg1)), arg1);
6760 return pedantic_non_lvalue
6761 (convert (type, fold (build1 (ABS_EXPR,
6762 TREE_TYPE (arg1), arg1))));
6763 case LE_EXPR:
6764 case LT_EXPR:
6765 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
6766 arg1 = convert (signed_type (TREE_TYPE (arg1)), arg1);
6767 return pedantic_non_lvalue
6768 (negate_expr (convert (type,
6769 fold (build1 (ABS_EXPR,
6770 TREE_TYPE (arg1),
6771 arg1)))));
6772 default:
6773 abort ();
6774 }
6775
6776 /* If this is A != 0 ? A : 0, this is simply A. For ==, it is
6777 always zero. */
6778
6779 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
6780 {
6781 if (comp_code == NE_EXPR)
6782 return pedantic_non_lvalue (convert (type, arg1));
6783 else if (comp_code == EQ_EXPR)
6784 return pedantic_non_lvalue (convert (type, integer_zero_node));
6785 }
6786
6787 /* If this is A op B ? A : B, this is either A, B, min (A, B),
6788 or max (A, B), depending on the operation. */
6789
6790 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
6791 arg2, TREE_OPERAND (arg0, 0)))
6792 {
6793 tree comp_op0 = TREE_OPERAND (arg0, 0);
6794 tree comp_op1 = TREE_OPERAND (arg0, 1);
6795 tree comp_type = TREE_TYPE (comp_op0);
6796
6797 switch (comp_code)
6798 {
6799 case EQ_EXPR:
6800 return pedantic_non_lvalue (convert (type, arg2));
6801 case NE_EXPR:
6802 return pedantic_non_lvalue (convert (type, arg1));
6803 case LE_EXPR:
6804 case LT_EXPR:
6805 /* In C++ a ?: expression can be an lvalue, so put the
6806 operand which will be used if they are equal first
6807 so that we can convert this back to the
6808 corresponding COND_EXPR. */
6809 return pedantic_non_lvalue
6810 (convert (type, (fold (build (MIN_EXPR, comp_type,
6811 (comp_code == LE_EXPR
6812 ? comp_op0 : comp_op1),
6813 (comp_code == LE_EXPR
6814 ? comp_op1 : comp_op0))))));
6815 break;
6816 case GE_EXPR:
6817 case GT_EXPR:
6818 return pedantic_non_lvalue
6819 (convert (type, fold (build (MAX_EXPR, comp_type,
6820 (comp_code == GE_EXPR
6821 ? comp_op0 : comp_op1),
6822 (comp_code == GE_EXPR
6823 ? comp_op1 : comp_op0)))));
6824 break;
6825 default:
6826 abort ();
6827 }
6828 }
6829
6830 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
6831 we might still be able to simplify this. For example,
6832 if C1 is one less or one more than C2, this might have started
6833 out as a MIN or MAX and been transformed by this function.
6834 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
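/* E.g. x < 4 ? x : 3 is min (x, 3) (C1 == C2 + 1), and
x > 2 ? x : 3 is max (x, 3) (C1 == C2 - 1). */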
6835
6836 if (INTEGRAL_TYPE_P (type)
6837 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6838 && TREE_CODE (arg2) == INTEGER_CST)
6839 switch (comp_code)
6840 {
6841 case EQ_EXPR:
6842 /* We can replace A with C1 in this case. */
6843 arg1 = convert (type, TREE_OPERAND (arg0, 1));
6844 t = build (code, type, TREE_OPERAND (t, 0), arg1,
6845 TREE_OPERAND (t, 2));
6846 break;
6847
6848 case LT_EXPR:
6849 /* If C1 is C2 + 1, this is min(A, C2). */
6850 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
6851 && operand_equal_p (TREE_OPERAND (arg0, 1),
6852 const_binop (PLUS_EXPR, arg2,
6853 integer_one_node, 0), 1))
6854 return pedantic_non_lvalue
6855 (fold (build (MIN_EXPR, type, arg1, arg2)));
6856 break;
6857
6858 case LE_EXPR:
6859 /* If C1 is C2 - 1, this is min(A, C2). */
6860 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
6861 && operand_equal_p (TREE_OPERAND (arg0, 1),
6862 const_binop (MINUS_EXPR, arg2,
6863 integer_one_node, 0), 1))
6864 return pedantic_non_lvalue
6865 (fold (build (MIN_EXPR, type, arg1, arg2)));
6866 break;
6867
6868 case GT_EXPR:
6869 /* If C1 is C2 - 1, this is max(A, C2). */
6870 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
6871 && operand_equal_p (TREE_OPERAND (arg0, 1),
6872 const_binop (MINUS_EXPR, arg2,
6873 integer_one_node, 0), 1))
6874 return pedantic_non_lvalue
6875 (fold (build (MAX_EXPR, type, arg1, arg2)));
6876 break;
6877
6878 case GE_EXPR:
6879 /* If C1 is C2 + 1, this is max(A, C2). */
6880 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
6881 && operand_equal_p (TREE_OPERAND (arg0, 1),
6882 const_binop (PLUS_EXPR, arg2,
6883 integer_one_node, 0), 1))
6884 return pedantic_non_lvalue
6885 (fold (build (MAX_EXPR, type, arg1, arg2)));
6886 break;
6887 case NE_EXPR:
6888 break;
6889 default:
6890 abort ();
6891 }
6892 }
6893
6894 /* If the second operand is simpler than the third, swap them
6895 since that produces better jump optimization results. */
6896 if ((TREE_CONSTANT (arg1) || TREE_CODE_CLASS (TREE_CODE (arg1)) == 'd'
6897 || TREE_CODE (arg1) == SAVE_EXPR)
6898 && ! (TREE_CONSTANT (TREE_OPERAND (t, 2))
6899 || TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (t, 2))) == 'd'
6900 || TREE_CODE (TREE_OPERAND (t, 2)) == SAVE_EXPR))
6901 {
6902 /* See if this can be inverted. If it can't, possibly because
6903 it was a floating-point inequality comparison, don't do
6904 anything. */
6905 tem = invert_truthvalue (arg0);
6906
6907 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
6908 {
6909 t = build (code, type, tem,
6910 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
6911 arg0 = tem;
6912 /* arg1 should now be the THEN arm, operand 1 of the new T. */
6913 arg1 = TREE_OPERAND (t, 1);
6914 STRIP_NOPS (arg1);
6915 }
6916 }
6917
6918 /* Convert A ? 1 : 0 to simply A. */
6919 if (integer_onep (TREE_OPERAND (t, 1))
6920 && integer_zerop (TREE_OPERAND (t, 2))
6921 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
6922 call to fold will try to move the conversion inside
6923 a COND, which will recurse. In that case, the COND_EXPR
6924 is probably the best choice, so leave it alone. */
6925 && type == TREE_TYPE (arg0))
6926 return pedantic_non_lvalue (arg0);
6927
6928 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
6929 operation is simply A & 2. */
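/* E.g. (illustrative): with integer x, `(x & 2) != 0 ? 2 : 0' folds to
   plain `x & 2', since the masked bit already yields exactly 2 or 0.  */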
6930
6931 if (integer_zerop (TREE_OPERAND (t, 2))
6932 && TREE_CODE (arg0) == NE_EXPR
6933 && integer_zerop (TREE_OPERAND (arg0, 1))
6934 && integer_pow2p (arg1)
6935 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
6936 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
6937 arg1, 1))
6938 return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));
6939
6940 return t;
6941
6942 case COMPOUND_EXPR:
6943 /* When pedantic, a compound expression can be neither an lvalue
6944 nor an integer constant expression. */
6945 if (TREE_SIDE_EFFECTS (arg0) || pedantic)
6946 return t;
6947 /* Don't let (0, 0) be a null pointer constant. */
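/* E.g. (illustrative): folding the initializer in `char *p = (0, 0);'
   down to a bare 0 would wrongly create a null pointer constant, which
   a comma expression is not; the NOP_EXPR wrapper prevents that.  */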
6948 if (integer_zerop (arg1))
6949 return build1 (NOP_EXPR, TREE_TYPE (arg1), arg1);
6950 return arg1;
6951
6952 case COMPLEX_EXPR:
6953 if (wins)
6954 return build_complex (type, arg0, arg1);
6955 return t;
6956
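/* The two cases below distribute part extraction over complex addition
   and subtraction; e.g. (illustrative) REALPART_EXPR <a + b> folds to
   REALPART_EXPR <a> + REALPART_EXPR <b>.  */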
6957 case REALPART_EXPR:
6958 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6959 return t;
6960 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6961 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
6962 TREE_OPERAND (arg0, 1));
6963 else if (TREE_CODE (arg0) == COMPLEX_CST)
6964 return TREE_REALPART (arg0);
6965 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6966 return fold (build (TREE_CODE (arg0), type,
6967 fold (build1 (REALPART_EXPR, type,
6968 TREE_OPERAND (arg0, 0))),
6969 fold (build1 (REALPART_EXPR,
6970 type, TREE_OPERAND (arg0, 1)))));
6971 return t;
6972
6973 case IMAGPART_EXPR:
6974 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6975 return convert (type, integer_zero_node);
6976 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6977 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
6978 TREE_OPERAND (arg0, 0));
6979 else if (TREE_CODE (arg0) == COMPLEX_CST)
6980 return TREE_IMAGPART (arg0);
6981 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6982 return fold (build (TREE_CODE (arg0), type,
6983 fold (build1 (IMAGPART_EXPR, type,
6984 TREE_OPERAND (arg0, 0))),
6985 fold (build1 (IMAGPART_EXPR, type,
6986 TREE_OPERAND (arg0, 1)))));
6987 return t;
6988
6989 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
6990 appropriate. */
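/* E.g. (illustrative): CLEANUP_POINT_EXPR <-x> becomes
   -(CLEANUP_POINT_EXPR <x>) via the unary case below, and similarly for
   comparisons and binary ops when one operand is constant or
   cleanup-free.  */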
6991 case CLEANUP_POINT_EXPR:
6992 if (! has_cleanups (arg0))
6993 return TREE_OPERAND (t, 0);
6994
6995 {
6996 enum tree_code code0 = TREE_CODE (arg0);
6997 int kind0 = TREE_CODE_CLASS (code0);
6998 tree arg00 = TREE_OPERAND (arg0, 0);
6999 tree arg01;
7000
7001 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
7002 return fold (build1 (code0, type,
7003 fold (build1 (CLEANUP_POINT_EXPR,
7004 TREE_TYPE (arg00), arg00))));
7005
7006 if (kind0 == '<' || kind0 == '2'
7007 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
7008 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
7009 || code0 == TRUTH_XOR_EXPR)
7010 {
7011 arg01 = TREE_OPERAND (arg0, 1);
7012
7013 if (TREE_CONSTANT (arg00)
7014 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
7015 && ! has_cleanups (arg00)))
7016 return fold (build (code0, type, arg00,
7017 fold (build1 (CLEANUP_POINT_EXPR,
7018 TREE_TYPE (arg01), arg01))));
7019
7020 if (TREE_CONSTANT (arg01))
7021 return fold (build (code0, type,
7022 fold (build1 (CLEANUP_POINT_EXPR,
7023 TREE_TYPE (arg00), arg00)),
7024 arg01));
7025 }
7026
7027 return t;
7028 }
7029
7030 default:
7031 return t;
7032 } /* switch (code) */
7033 }
7034
7035 /* Determine if the first argument is a multiple of the second. Return 0 if
7036 it is not, or if we cannot easily determine that it is.
7037
7038 An example of the sort of thing we care about (at this point; this routine
7039 could surely be made more general, and expanded to do what the *_DIV_EXPR's
7040 fold cases do now) is discovering that
7041
7042 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
7043
7044 is a multiple of
7045
7046 SAVE_EXPR (J * 8)
7047
7048 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
7049
7050 This code also handles discovering that
7051
7052 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
7053
7054 is a multiple of 8, so we don't have to worry about a possible
7055 remainder.
7056
7057 Note that we *look* inside a SAVE_EXPR only to determine how it was
7058 calculated; it is not safe for fold to do much of anything else with the
7059 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
7060 at run time. For example, the latter example above *cannot* be implemented
7061 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
7062 evaluation time of the original SAVE_EXPR is not necessarily its value at
7063 the time the new expression is evaluated. The only optimization of this
7064 sort that would be valid is changing
7065
7066 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
7067
7068 divided by 8 to
7069
7070 SAVE_EXPR (I) * SAVE_EXPR (J)
7071
7072 (where the same SAVE_EXPR (J) is used in the original and the
7073 transformed version). */
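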
7074
7075 static int
7076 multiple_of_p (type, top, bottom)
7077 tree type;
7078 tree top;
7079 tree bottom;
7080 {
7081 if (operand_equal_p (top, bottom, 0))
7082 return 1;
7083
7084 if (TREE_CODE (type) != INTEGER_TYPE)
7085 return 0;
7086
7087 switch (TREE_CODE (top))
7088 {
7089 case MULT_EXPR:
7090 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
7091 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
7092
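/* For PLUS_EXPR and MINUS_EXPR below, unlike MULT_EXPR above, both
   operands must be multiples; e.g. (illustrative) 8 + 4 is not a
   multiple of 8 even though 8 is.  */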
7093 case PLUS_EXPR:
7094 case MINUS_EXPR:
7095 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
7096 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
7097
7098 case NOP_EXPR:
7099 /* Can't handle conversions from non-integral or wider integral type. */
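/* Illustrative counterexample (not from the original source, assuming
   an 8-bit char): 260 is a multiple of 5, but `(char) 260' truncates
   to 4, which is not, so narrowing conversions must not be looked
   through.  */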
7100 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
7101 || (TYPE_PRECISION (type)
7102 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
7103 return 0;
7104
7105 /* ... fall through ... */
7106
7107 case SAVE_EXPR:
7108 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
7109
7110 case INTEGER_CST:
7111 if ((TREE_CODE (bottom) != INTEGER_CST)
7112 || (tree_int_cst_sgn (top) < 0)
7113 || (tree_int_cst_sgn (bottom) < 0))
7114 return 0;
7115 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
7116 top, bottom, 0));
7117
7118 default:
7119 return 0;
7120 }
7121 }
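/* A minimal sketch of the INTEGER_CST logic above over plain host
   integers (illustrative only; `host_multiple_of_p' and the use of
   `long' are hypothetical and not part of GCC).  The sign test mirrors
   the tree_int_cst_sgn checks and also guards the division; the modulo
   plays the role of TRUNC_MOD_EXPR compared against zero:

     static int
     host_multiple_of_p (long top, long bottom)
     {
       if (top < 0 || bottom <= 0)
         return 0;
       return top % bottom == 0;
     }

   The tree version additionally recurses through MULT_EXPR (one
   qualifying factor suffices), PLUS_EXPR and MINUS_EXPR (both operands
   must qualify), and through SAVE_EXPRs and non-narrowing NOP_EXPRs.  */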