/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant and a prior overflow indicator, and
   forces the value to fit the type.  It returns an overflow indicator.  */

#include "config.h"
#include "system.h"
#include <setjmp.h>
#include "flags.h"
#include "tree.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"

static void encode PARAMS ((HOST_WIDE_INT *,
                            unsigned HOST_WIDE_INT,
                            HOST_WIDE_INT));
static void decode PARAMS ((HOST_WIDE_INT *,
                            unsigned HOST_WIDE_INT *,
                            HOST_WIDE_INT *));
static tree negate_expr PARAMS ((tree));
static tree split_tree PARAMS ((tree, enum tree_code, tree *, tree *,
                                int));
static tree associate_trees PARAMS ((tree, tree, enum tree_code, tree));
static tree int_const_binop PARAMS ((enum tree_code, tree, tree, int, int));
static void const_binop_1 PARAMS ((PTR));
static tree const_binop PARAMS ((enum tree_code, tree, tree, int));
static void fold_convert_1 PARAMS ((PTR));
static tree fold_convert PARAMS ((tree, tree));
static enum tree_code invert_tree_comparison PARAMS ((enum tree_code));
static enum tree_code swap_tree_comparison PARAMS ((enum tree_code));
static int truth_value_p PARAMS ((enum tree_code));
static int operand_equal_for_comparison_p PARAMS ((tree, tree, tree));
static int twoval_comparison_p PARAMS ((tree, tree *, tree *, int *));
static tree eval_subst PARAMS ((tree, tree, tree, tree, tree));
static tree omit_one_operand PARAMS ((tree, tree, tree));
static tree pedantic_omit_one_operand PARAMS ((tree, tree, tree));
static tree distribute_bit_expr PARAMS ((enum tree_code, tree, tree, tree));
static tree make_bit_field_ref PARAMS ((tree, tree, int, int, int));
static tree optimize_bit_field_compare PARAMS ((enum tree_code, tree,
                                                tree, tree));
static tree decode_field_reference PARAMS ((tree, HOST_WIDE_INT *,
                                            HOST_WIDE_INT *,
                                            enum machine_mode *, int *,
                                            int *, tree *, tree *));
static int all_ones_mask_p PARAMS ((tree, int));
static int simple_operand_p PARAMS ((tree));
static tree range_binop PARAMS ((enum tree_code, tree, tree, int,
                                 tree, int));
static tree make_range PARAMS ((tree, int *, tree *, tree *));
static tree build_range_check PARAMS ((tree, tree, int, tree, tree));
static int merge_ranges PARAMS ((int *, tree *, tree *, int, tree, tree,
                                 int, tree, tree));
static tree fold_range_test PARAMS ((tree));
static tree unextend PARAMS ((tree, int, int, tree));
static tree fold_truthop PARAMS ((enum tree_code, tree, tree, tree));
static tree optimize_minmax_comparison PARAMS ((tree));
static tree extract_muldiv PARAMS ((tree, tree, enum tree_code, tree));
static tree strip_compound_expr PARAMS ((tree, tree));
static int multiple_of_p PARAMS ((tree, tree, tree));
static tree constant_boolean_node PARAMS ((int, tree));
static int count_cond PARAMS ((tree, int));
static tree fold_binary_op_with_conditional_arg
  PARAMS ((enum tree_code, tree, tree, tree, int));

#ifndef BRANCH_COST
#define BRANCH_COST 1
#endif

#if defined(HOST_EBCDIC)
/* bit 8 is significant in EBCDIC */
#define CHARMASK 0xff
#else
#define CHARMASK 0x7f
#endif

/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
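
/* For example, with 8-bit words: a = 0x70 (+112), b = 0x70, sum = 0xe0
   (-32).  Then ~(a ^ b) = 0xff (the signs agree) and a ^ sum = 0x90 (the
   sign flipped), so the AND has the sign bit set and the macro reports
   overflow.  */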
\f
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
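
/* For example, on a host where HOST_BITS_PER_WIDE_INT is 32, BASE is
   0x10000, LOWPART (0x12345678) is 0x5678 and HIGHPART (0x12345678) is
   0x1234, so the word's value is 0x5678 + 0x1234 * 0x10000.  */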

/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (words, low, hi)
     HOST_WIDE_INT *words;
     unsigned HOST_WIDE_INT low;
     HOST_WIDE_INT hi;
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (words, low, hi)
     HOST_WIDE_INT *words;
     unsigned HOST_WIDE_INT *low;
     HOST_WIDE_INT *hi;
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
\f
/* Make the integer constant T valid for its type by setting to 0 or 1 all
   the bits in the constant that don't belong in the type.

   Return 1 if a signed overflow occurs, 0 otherwise.  If OVERFLOW is
   nonzero, a signed overflow has already occurred in calculating T, so
   propagate it.

   Make the real constant T valid for its type by calling CHECK_FLOAT_VALUE,
   if it exists.  */

int
force_fit_type (t, overflow)
     tree t;
     int overflow;
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;

  if (TREE_CODE (t) == REAL_CST)
    {
#ifdef CHECK_FLOAT_VALUE
      CHECK_FLOAT_VALUE (TYPE_MODE (TREE_TYPE (t)), TREE_REAL_CST (t),
                         overflow);
#endif
      return overflow;
    }

  else if (TREE_CODE (t) != INTEGER_CST)
    return overflow;

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t)))
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec == 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    TREE_INT_CST_HIGH (t)
      &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      TREE_INT_CST_HIGH (t) = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
    }

  /* Unsigned types do not suffer sign extension or overflow unless they
     are a sizetype.  */
  if (TREE_UNSIGNED (TREE_TYPE (t))
      && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
            && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
    return overflow;

  /* If the value's sign bit is set, extend the sign.  */
  if (prec != 2 * HOST_BITS_PER_WIDE_INT
      && (prec > HOST_BITS_PER_WIDE_INT
          ? 0 != (TREE_INT_CST_HIGH (t)
                  & ((HOST_WIDE_INT) 1
                     << (prec - HOST_BITS_PER_WIDE_INT - 1)))
          : 0 != (TREE_INT_CST_LOW (t)
                  & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
    {
      /* Value is negative:
         set to 1 all the bits that are outside this type's precision.  */
      if (prec > HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_HIGH (t)
          |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      else
        {
          TREE_INT_CST_HIGH (t) = -1;
          if (prec < HOST_BITS_PER_WIDE_INT)
            TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
        }
    }

  /* Return nonzero if signed overflow occurred.  */
  return
    ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
     != 0);
}
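
/* For example, for an 8-bit signed type: a constant built with low bits
   0xff (255) is truncated to 8 bits and then sign-extended, yielding -1
   (low and high parts all ones); since the bits changed, signed overflow
   is reported.  A constant that was already -1 (all bits set) comes back
   unchanged, so no overflow is reported.  */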
\f
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (l1, h1, l2, h2, lv, hv)
     unsigned HOST_WIDE_INT l1, l2;
     HOST_WIDE_INT h1, h2;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
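
/* Note that the carry out of the low word is detected without a wider
   type: unsigned addition wraps, so l = l1 + l2 is less than l1 exactly
   when a carry occurred.  E.g. l1 = ~0, l2 = 1 gives l = 0 < l1, and the
   carry of 1 is added into the high word.  */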

/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (l1, h1, lv, hv)
     unsigned HOST_WIDE_INT l1;
     HOST_WIDE_INT h1;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
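
/* The two cases implement -x == ~x + 1 on the double word.  When the low
   word is zero, the +1 carries all the way into the high word, so the
   result is (0, -h1); negation overflows only when h1 is the value with
   just the sign bit set, which (*hv & h1) < 0 detects.  When the low word
   is nonzero, the +1 is absorbed there, leaving (-l1, ~h1) with no
   possibility of overflow.  */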
\f
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (l1, h1, l2, h2, lv, hv)
     unsigned HOST_WIDE_INT l1, l2;
     HOST_WIDE_INT h1, h2;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  register unsigned HOST_WIDE_INT carry;
  register int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset ((char *) prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1].  */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
\f
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (l1, h1, count, prec, lv, hv, arith)
     unsigned HOST_WIDE_INT l1;
     HOST_WIDE_INT h1, count;
     unsigned int prec;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
     int arith;
{
  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }
}
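
/* The expression l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1 above
   computes l1 >> (HOST_BITS_PER_WIDE_INT - count) without ever shifting
   by a full word width: when count is 0 the inner shift is by the word
   size minus one and the outer shift by one discards the remaining bit,
   avoiding the undefined full-width shift.  */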

/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (l1, h1, count, prec, lv, hv, arith)
     unsigned HOST_WIDE_INT l1;
     HOST_WIDE_INT h1, count;
     unsigned int prec ATTRIBUTE_UNUSED;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
     int arith;
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = signmask;
      *lv = signmask;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = signmask;
      *lv = ((signmask << (2 * HOST_BITS_PER_WIDE_INT - count - 1) << 1)
             | ((unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT)));
    }
  else
    {
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
      *hv = ((signmask << (HOST_BITS_PER_WIDE_INT - count))
             | ((unsigned HOST_WIDE_INT) h1 >> count));
    }
}
\f
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (l1, h1, count, prec, lv, hv)
     unsigned HOST_WIDE_INT l1;
     HOST_WIDE_INT h1, count;
     unsigned int prec;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
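
/* A left rotation by COUNT within PREC bits is simply
   (x << COUNT) | (x >> (PREC - COUNT)) using logical shifts; after the
   normalization above COUNT is in [0, PREC), so both component shifts
   fall within the ranges the doubleword shift routines handle.  */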

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (l1, h1, count, prec, lv, hv)
     unsigned HOST_WIDE_INT l1;
     HOST_WIDE_INT h1, count;
     unsigned int prec;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
\f
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */
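
/* For example, dividing -7 by 2: TRUNC_DIV_EXPR gives quotient -3,
   remainder -1; FLOOR_DIV_EXPR gives -4 and 1; CEIL_DIV_EXPR gives -3
   and -1; ROUND_DIV_EXPR gives -4 and 1, since the code below rounds a
   half-way remainder away from zero.  */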

int
div_and_round_double (code, uns,
                      lnum_orig, hnum_orig, lden_orig, hden_orig,
                      lquo, hquo, lrem, hrem)
     enum tree_code code;
     int uns;
     unsigned HOST_WIDE_INT lnum_orig;	/* num == numerator == dividend */
     HOST_WIDE_INT hnum_orig;
     unsigned HOST_WIDE_INT lden_orig;	/* den == denominator == divisor */
     HOST_WIDE_INT hden_orig;
     unsigned HOST_WIDE_INT *lquo, *lrem;
     HOST_WIDE_INT *hquo, *hrem;
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  register int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset ((char *) quo, 0, sizeof quo);

  memset ((char *) num, 0, sizeof num);	/* to zero 9th element */
  memset ((char *) den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest non-zero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {			/* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0)
                den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < carry)
            {
              quo_est--;
              carry = 0;	/* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num[num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den).  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      abort ();
    }

  /* Compute true remainder:  rem = num - (quo * den).  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
\f
#ifndef REAL_ARITHMETIC
/* Effectively truncate a real value to represent the nearest possible value
   in a narrower mode.  The result is actually represented in the same data
   type as the argument, but its value is usually different.

   A trap may occur during the FP operations and it is the responsibility
   of the calling function to have a handler established.  */

REAL_VALUE_TYPE
real_value_truncate (mode, arg)
     enum machine_mode mode;
     REAL_VALUE_TYPE arg;
{
  return REAL_VALUE_TRUNCATE (mode, arg);
}

#if TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT

/* Check for infinity in an IEEE double precision number.  */

int
target_isinf (x)
     REAL_VALUE_TYPE x;
{
  /* The IEEE 64-bit double format.  */
  union {
    REAL_VALUE_TYPE d;
    struct {
      unsigned sign      :  1;
      unsigned exponent  : 11;
      unsigned mantissa1 : 20;
      unsigned mantissa2;
    } little_endian;
    struct {
      unsigned mantissa2;
      unsigned mantissa1 : 20;
      unsigned exponent  : 11;
      unsigned sign      :  1;
    } big_endian;
  } u;

  u.d = dconstm1;
  if (u.big_endian.sign == 1)
    {
      u.d = x;
      return (u.big_endian.exponent == 2047
              && u.big_endian.mantissa1 == 0
              && u.big_endian.mantissa2 == 0);
    }
  else
    {
      u.d = x;
      return (u.little_endian.exponent == 2047
              && u.little_endian.mantissa1 == 0
              && u.little_endian.mantissa2 == 0);
    }
}
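
/* Storing dconstm1 (-1.0) and testing where its sign bit lands is a
   runtime probe of how the host lays out an IEEE double: -1.0 has its
   sign bit set, so whichever overlay reports sign == 1 matches the
   host's word order and bitfield layout.  The same idiom is used in
   target_isnan and target_negative below.  */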

/* Check whether an IEEE double precision number is a NaN.  */

int
target_isnan (x)
     REAL_VALUE_TYPE x;
{
  /* The IEEE 64-bit double format.  */
  union {
    REAL_VALUE_TYPE d;
    struct {
      unsigned sign      :  1;
      unsigned exponent  : 11;
      unsigned mantissa1 : 20;
      unsigned mantissa2;
    } little_endian;
    struct {
      unsigned mantissa2;
      unsigned mantissa1 : 20;
      unsigned exponent  : 11;
      unsigned sign      :  1;
    } big_endian;
  } u;

  u.d = dconstm1;
  if (u.big_endian.sign == 1)
    {
      u.d = x;
      return (u.big_endian.exponent == 2047
              && (u.big_endian.mantissa1 != 0
                  || u.big_endian.mantissa2 != 0));
    }
  else
    {
      u.d = x;
      return (u.little_endian.exponent == 2047
              && (u.little_endian.mantissa1 != 0
                  || u.little_endian.mantissa2 != 0));
    }
}

/* Check for a negative IEEE double precision number.  */

int
target_negative (x)
     REAL_VALUE_TYPE x;
{
  /* The IEEE 64-bit double format.  */
  union {
    REAL_VALUE_TYPE d;
    struct {
      unsigned sign      :  1;
      unsigned exponent  : 11;
      unsigned mantissa1 : 20;
      unsigned mantissa2;
    } little_endian;
    struct {
      unsigned mantissa2;
      unsigned mantissa1 : 20;
      unsigned exponent  : 11;
      unsigned sign      :  1;
    } big_endian;
  } u;

  u.d = dconstm1;
  if (u.big_endian.sign == 1)
    {
      u.d = x;
      return u.big_endian.sign;
    }
  else
    {
      u.d = x;
      return u.little_endian.sign;
    }
}
#else /* Target not IEEE */

/* Let's assume other float formats don't have infinity.
   (This can be overridden by redefining REAL_VALUE_ISINF.)  */

int
target_isinf (x)
     REAL_VALUE_TYPE x ATTRIBUTE_UNUSED;
{
  return 0;
}

/* Let's assume other float formats don't have NaNs.
   (This can be overridden by redefining REAL_VALUE_ISNAN.)  */

int
target_isnan (x)
     REAL_VALUE_TYPE x ATTRIBUTE_UNUSED;
{
  return 0;
}

/* Let's assume other float formats don't have minus zero.
   (This can be overridden by redefining REAL_VALUE_NEGATIVE.)  */

int
target_negative (x)
     REAL_VALUE_TYPE x;
{
  return x < 0;
}
#endif /* Target not IEEE */

/* Try to change R into its exact multiplicative inverse in machine mode
   MODE.  Return nonzero function value if successful.  */

int
exact_real_inverse (mode, r)
     enum machine_mode mode;
     REAL_VALUE_TYPE *r;
{
  jmp_buf float_error;
  union
    {
      double d;
      unsigned short i[4];
    } x, t, y;
#ifdef CHECK_FLOAT_VALUE
  int i;
#endif

  /* Usually disable if bounds checks are not reliable.  */
  if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT) && !flag_pretend_float)
    return 0;

  /* Set array index to the less significant bits in the unions, depending
     on the endian-ness of the host doubles.
     Disable if insufficient information on the data structure.  */
#if HOST_FLOAT_FORMAT == UNKNOWN_FLOAT_FORMAT
  return 0;
#else
#if HOST_FLOAT_FORMAT == VAX_FLOAT_FORMAT
#define K 2
#else
#if HOST_FLOAT_FORMAT == IBM_FLOAT_FORMAT
#define K 2
#else
#define K (2 * HOST_FLOAT_WORDS_BIG_ENDIAN)
#endif
#endif
#endif

  if (setjmp (float_error))
    {
      /* Don't do the optimization if there was an arithmetic error.  */
    fail:
      set_float_handler (NULL_PTR);
      return 0;
    }
  set_float_handler (float_error);

  /* Domain check the argument.  */
  x.d = *r;
  if (x.d == 0.0)
    goto fail;

#ifdef REAL_INFINITY
  if (REAL_VALUE_ISINF (x.d) || REAL_VALUE_ISNAN (x.d))
    goto fail;
#endif

  /* Compute the reciprocal and check for numerical exactness.
     It is unnecessary to check all the significand bits to determine
     whether X is a power of 2.  If X is not, then it is impossible for
     the bottom half significand of both X and 1/X to be all zero bits.
     Hence we ignore the data structure of the top half and examine only
     the low order bits of the two significands.  */
  t.d = 1.0 / x.d;
  if (x.i[K] != 0 || x.i[K + 1] != 0 || t.i[K] != 0 || t.i[K + 1] != 0)
    goto fail;

  /* Truncate to the required mode and range-check the result.  */
  y.d = REAL_VALUE_TRUNCATE (mode, t.d);
#ifdef CHECK_FLOAT_VALUE
  i = 0;
  if (CHECK_FLOAT_VALUE (mode, y.d, i))
    goto fail;
#endif

  /* Fail if truncation changed the value.  */
  if (y.d != t.d || y.d == 0.0)
    goto fail;

#ifdef REAL_INFINITY
  if (REAL_VALUE_ISINF (y.d) || REAL_VALUE_ISNAN (y.d))
    goto fail;
#endif

  /* Output the reciprocal and return success flag.  */
  set_float_handler (NULL_PTR);
  *r = y.d;
  return 1;
}

/* Convert C99 hexadecimal floating point string constant S.  Return
   real value type in mode MODE.  This function uses the host computer's
   floating point arithmetic when there is no REAL_ARITHMETIC.  */
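
/* For example, the C99 constant "0x1.fp3" denotes (1 + 15/16) * 2**3,
   i.e. 15.5: the hex digits form the significand and the decimal number
   after `p' is a binary exponent.  */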

REAL_VALUE_TYPE
real_hex_to_f (s, mode)
     char *s;
     enum machine_mode mode;
{
  REAL_VALUE_TYPE ip;
  char *p = s;
  unsigned HOST_WIDE_INT low, high;
  int shcount, nrmcount, k;
  int sign, expsign, isfloat;
  int lost = 0;		/* Nonzero low order bits shifted out and discarded.  */
  int frexpon = 0;	/* Bits after the decimal point.  */
  int expon = 0;	/* Value of exponent.  */
  int decpt = 0;	/* How many decimal points.  */
  int gotp = 0;		/* How many P's.  */
  char c;

  isfloat = 0;
  expsign = 1;
  ip = 0.0;

  while (*p == ' ' || *p == '\t')
    ++p;

  /* Sign, if any, comes first.  */
  sign = 1;
  if (*p == '-')
    {
      sign = -1;
      ++p;
    }

  /* The string is supposed to start with 0x or 0X.  */
  if (*p == '0')
    {
      ++p;
      if (*p == 'x' || *p == 'X')
        ++p;
      else
        abort ();
    }
  else
    abort ();

  while (*p == '0')
    ++p;

  high = 0;
  low = 0;
  shcount = 0;
  while ((c = *p) != '\0')
    {
      if ((c >= '0' && c <= '9') || (c >= 'A' && c <= 'F')
          || (c >= 'a' && c <= 'f'))
        {
          k = c & CHARMASK;
          if (k >= 'a' && k <= 'f')
            k = k - 'a' + 10;
          else if (k >= 'A')
            k = k - 'A' + 10;
          else
            k = k - '0';

          if ((high & 0xf0000000) == 0)
            {
              high = (high << 4) + ((low >> 28) & 15);
              low = (low << 4) + k;
              shcount += 4;
              if (decpt)
                frexpon += 4;
            }
          else
            {
              /* Record nonzero lost bits.  */
              lost |= k;
              if (! decpt)
                frexpon -= 4;
            }
          ++p;
        }
      else if (c == '.')
        {
          ++decpt;
          ++p;
        }

      else if (c == 'p' || c == 'P')
        {
          ++gotp;
          ++p;
          /* Sign of exponent.  */
          if (*p == '-')
            {
              expsign = -1;
              ++p;
            }

          /* Value of exponent.
             The exponent field is a decimal integer.  */
          while (ISDIGIT (*p))
            {
              k = (*p++ & CHARMASK) - '0';
              expon = 10 * expon + k;
            }

          expon *= expsign;
          /* F suffix is ambiguous in the significand part
             so it must appear after the decimal exponent field.  */
          if (*p == 'f' || *p == 'F')
            {
              isfloat = 1;
              ++p;
              break;
            }
        }

      else if (c == 'l' || c == 'L')
        {
          ++p;
          break;
        }
      else
        break;
    }

  /* Abort if last character read was not legitimate.  */
  c = *p;
  if ((c != '\0' && c != ' ' && c != '\n' && c != '\r') || (decpt > 1))
    abort ();

  /* There must be either one decimal point or one p.  */
  if (decpt == 0 && gotp == 0)
    abort ();

  shcount -= 4;
  if (high == 0 && low == 0)
    return dconst0;

  /* Normalize.  */
  nrmcount = 0;
  if (high == 0)
    {
      high = low;
      low = 0;
      nrmcount += 32;
    }

  /* Leave a high guard bit for carry-out.  */
  if ((high & 0x80000000) != 0)
    {
      lost |= low & 1;
      low = (low >> 1) | (high << 31);
      high = high >> 1;
      nrmcount -= 1;
    }

  if ((high & 0xffff8000) == 0)
    {
      high = (high << 16) + ((low >> 16) & 0xffff);
      low = low << 16;
      nrmcount += 16;
    }

  while ((high & 0xc0000000) == 0)
    {
      high = (high << 1) + ((low >> 31) & 1);
      low = low << 1;
      nrmcount += 1;
    }

  if (isfloat || GET_MODE_SIZE (mode) == UNITS_PER_WORD)
    {
      /* Keep 24 bits precision, bits 0x7fffff80.
         Rounding bit is 0x40.  */
      lost = lost | low | (high & 0x3f);
      low = 0;
      if (high & 0x40)
        {
          if ((high & 0x80) || lost)
            high += 0x40;
        }
      high &= 0xffffff80;
    }
  else
    {
      /* We need real.c to do long double formats, so here default
         to double precision.  */
#if HOST_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
      /* IEEE double.
         Keep 53 bits precision, bits 0x7fffffff fffffc00.
         Rounding bit is low word 0x200.  */
      lost = lost | (low & 0x1ff);
      if (low & 0x200)
        {
          if ((low & 0x400) || lost)
            {
              low = (low + 0x200) & 0xfffffc00;
              if (low == 0)
                high += 1;
            }
        }
      low &= 0xfffffc00;
#else
      /* Assume it's a VAX with 56-bit significand,
         bits 0x7fffffff ffffff80.  */
      lost = lost | (low & 0x7f);
      if (low & 0x40)
        {
          if ((low & 0x80) || lost)
            {
              low = (low + 0x40) & 0xffffff80;
              if (low == 0)
                high += 1;
            }
        }
      low &= 0xffffff80;
#endif
    }

  ip = (double) high;
  ip = REAL_VALUE_LDEXP (ip, 32) + (double) low;
  /* Apply shifts and exponent value as power of 2.  */
  ip = REAL_VALUE_LDEXP (ip, expon - (nrmcount + frexpon));

  if (sign < 0)
    ip = -ip;
  return ip;
}

#endif /* no REAL_ARITHMETIC */
\f
/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (t)
     tree t;
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
    case REAL_CST:
      if (! TREE_UNSIGNED (type)
          && 0 != (tem = fold (build1 (NEGATE_EXPR, type, t)))
          && ! TREE_OVERFLOW (tem))
        return tem;
      break;

    case NEGATE_EXPR:
      return convert (type, TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        return convert (type,
                        fold (build (MINUS_EXPR, TREE_TYPE (t),
                                     TREE_OPERAND (t, 1),
                                     TREE_OPERAND (t, 0))));
      break;

    default:
      break;
    }

  return convert (type, build1 (NEGATE_EXPR, TREE_TYPE (t), t));
}
\f
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  If NEGATE_P is true, we
   are negating all of IN.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */
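
/* For example, with CODE == PLUS_EXPR, splitting IN == a - 4 (in a signed
   type) stores the negated literal -4 in *LITP, leaves *CONP null, and
   returns a as the variable part.  */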

static tree
split_tree (in, code, conp, litp, negate_p)
     tree in;
     enum tree_code code;
     tree *conp, *litp;
     int negate_p;
{
  tree var = 0;

  *conp = 0;
  *litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *litp = negate_expr (*litp);
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      var = negate_expr (var);
      *conp = negate_expr (*conp);
      *litp = negate_expr (*litp);
    }

  return var;
}

/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE, except if CODE is a
   MINUS_EXPR, in which case we use PLUS_EXPR since split_tree will already
   have taken care of the negations.  */

static tree
associate_trees (t1, t2, code, type)
     tree t1, t2;
     enum tree_code code;
     tree type;
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  if (code == MINUS_EXPR)
    code = PLUS_EXPR;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (TREE_CODE (t1) == NEGATE_EXPR)
        return build (MINUS_EXPR, type, convert (type, t2),
                      convert (type, TREE_OPERAND (t1, 0)));
      else if (TREE_CODE (t2) == NEGATE_EXPR)
        return build (MINUS_EXPR, type, convert (type, t1),
                      convert (type, TREE_OPERAND (t2, 0)));
      else
        return build (code, type, convert (type, t1), convert (type, t2));
    }

  return fold (build (code, type, convert (type, t1), convert (type, t2)));
}
\f
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.
   If FORSIZE is nonzero, compute overflow for unsigned types.  */

static tree
int_const_binop (code, arg1, arg2, notrunc, forsize)
     enum tree_code code;
     register tree arg1, arg2;
     int notrunc, forsize;
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  register tree t;
  int uns = TREE_UNSIGNED (TREE_TYPE (arg1));
  int overflow = 0;
  int no_overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case BIT_ANDTC_EXPR:
      low = int1l & ~int2l, hi = int1h & ~int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (TREE_TYPE (arg1)),
                     &low, &hi, !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (TREE_TYPE (arg1)),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      abort ();
    }

  if (forsize && hi == 0 && low < 10000
      && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
    return size_int_type_wide (low, TREE_TYPE (arg1));
  else
    {
      t = build_int_2 (low, hi);
      TREE_TYPE (t) = TREE_TYPE (arg1);
    }

  TREE_OVERFLOW (t)
    = ((notrunc ? (!uns || forsize) && overflow
        : force_fit_type (t, (!uns || forsize) && overflow) && ! no_overflow)
       | TREE_OVERFLOW (arg1)
       | TREE_OVERFLOW (arg2));

  /* If we're doing a size calculation, unsigned arithmetic does overflow.
     So check if force_fit_type truncated the value.  */
  if (forsize
      && ! TREE_OVERFLOW (t)
      && (TREE_INT_CST_HIGH (t) != hi
          || TREE_INT_CST_LOW (t) != low))
    TREE_OVERFLOW (t) = 1;

  TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
                                | TREE_CONSTANT_OVERFLOW (arg1)
                                | TREE_CONSTANT_OVERFLOW (arg2));
  return t;
}

/* Define input and output argument for const_binop_1.  */
struct cb_args
{
  enum tree_code code;		/* Input: tree code for operation.  */
  tree type;			/* Input: tree type for operation.  */
  REAL_VALUE_TYPE d1, d2;	/* Input: floating point operands.  */
  tree t;			/* Output: constant for result.  */
};

/* Do the real arithmetic for const_binop while protected by a
   float overflow handler.  */

static void
const_binop_1 (data)
     PTR data;
{
  struct cb_args *args = (struct cb_args *) data;
  REAL_VALUE_TYPE value;

#ifdef REAL_ARITHMETIC
  REAL_ARITHMETIC (value, args->code, args->d1, args->d2);
#else
  switch (args->code)
    {
    case PLUS_EXPR:
      value = args->d1 + args->d2;
      break;

    case MINUS_EXPR:
      value = args->d1 - args->d2;
      break;

    case MULT_EXPR:
      value = args->d1 * args->d2;
      break;

    case RDIV_EXPR:
#ifndef REAL_INFINITY
      if (args->d2 == 0)
        abort ();
#endif

      value = args->d1 / args->d2;
      break;

    case MIN_EXPR:
      value = MIN (args->d1, args->d2);
      break;

    case MAX_EXPR:
      value = MAX (args->d1, args->d2);
      break;

    default:
      abort ();
    }
#endif /* no REAL_ARITHMETIC */

  args->t
    = build_real (args->type,
                  real_value_truncate (TYPE_MODE (args->type), value));
}

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (code, arg1, arg2, notrunc)
     enum tree_code code;
     register tree arg1, arg2;
     int notrunc;
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc, 0);

#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
  if (TREE_CODE (arg1) == REAL_CST)
    {
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      int overflow = 0;
      tree t;
      struct cb_args args;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      /* Set up input for const_binop_1.  */
      args.type = TREE_TYPE (arg1);
      args.d1 = d1;
      args.d2 = d2;
      args.code = code;

      if (do_float_handler (const_binop_1, (PTR) &args))
        /* Receive output from const_binop_1.  */
        t = args.t;
      else
        {
          /* We got an exception from const_binop_1.  */
          t = copy_node (arg1);
          overflow = 1;
        }

      TREE_OVERFLOW (t)
        = (force_fit_type (t, overflow)
           | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      register tree type = TREE_TYPE (arg1);
      register tree r1 = TREE_REALPART (arg1);
      register tree i1 = TREE_IMAGPART (arg1);
      register tree r2 = TREE_REALPART (arg2);
      register tree i2 = TREE_IMAGPART (arg2);
      register tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            register tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t = build_complex (type,
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (PLUS_EXPR,
                                             const_binop (MULT_EXPR, r1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, i1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc),
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (MINUS_EXPR,
                                             const_binop (MULT_EXPR, i1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, r1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc));
          }
          break;

        default:
          abort ();
        }
      return t;
    }
  return 0;
}
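
/* The complex cases above use the textbook identities
   (a + bi)(c + di) = (ac - bd) + (ad + bc)i and
   (a + bi)/(c + di) = ((ac + bd) + (bc - ad)i) / (c*c + d*d),
   with both component divisions done against MAGSQUARED.  */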
\f
/* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
   bits are given by NUMBER and of the sizetype represented by KIND.  */

tree
size_int_wide (number, kind)
     HOST_WIDE_INT number;
     enum size_type_kind kind;
{
  return size_int_type_wide (number, sizetype_tab[(int) kind]);
}

/* Likewise, but the desired type is specified explicitly.  */

tree
size_int_type_wide (number, type)
     HOST_WIDE_INT number;
     tree type;
{
  /* Type-size nodes already made for small sizes.  */
  static tree size_table[2048 + 1];
  static int init_p = 0;
  tree t;

  if (! init_p)
    {
      ggc_add_tree_root ((tree *) size_table,
                         sizeof size_table / sizeof (tree));
      init_p = 1;
    }

  /* If this is a positive number that fits in the table we use to hold
     cached entries, see if it is already in the table and put it there
     if not.  */
  if (number >= 0 && number < (int) ARRAY_SIZE (size_table))
    {
      if (size_table[number] != 0)
        for (t = size_table[number]; t != 0; t = TREE_CHAIN (t))
          if (TREE_TYPE (t) == type)
            return t;

      t = build_int_2 (number, 0);
      TREE_TYPE (t) = type;
      TREE_CHAIN (t) = size_table[number];
      size_table[number] = t;

      return t;
    }

  t = build_int_2 (number, number < 0 ? -1 : 0);
  TREE_TYPE (t) = type;
  TREE_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (t) = force_fit_type (t, 0);
  return t;
}

/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type, and it must be a sizetype.
   If the operands are constant, so is the result.  */

tree
size_binop (code, arg0, arg1)
     enum tree_code code;
     tree arg0, arg1;
{
  tree type = TREE_TYPE (arg0);

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0, 1);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold (build (code, type, arg0, arg1));
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (arg0, arg1)
     tree arg0, arg1;
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* If the type is already signed, just do the simple thing.  */
  if (! TREE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = (type == bitsizetype || type == ubitsizetype
           ? sbitsizetype : ssizetype);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, convert (ctype, arg0),
                       convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, convert (ctype, integer_zero_node),
                       convert (ctype, size_binop (MINUS_EXPR, arg1, arg0)));
}
\f
/* This structure is used to communicate arguments to fold_convert_1.  */
struct fc_args
{
  tree arg1;			/* Input: value to convert.  */
  tree type;			/* Input: type to convert value to.  */
  tree t;			/* Output: result of conversion.  */
};

/* Function to convert floating-point constants, protected by floating
   point exception handler.  */

static void
fold_convert_1 (data)
     PTR data;
{
  struct fc_args *args = (struct fc_args *) data;

  args->t = build_real (args->type,
                        real_value_truncate (TYPE_MODE (args->type),
                                             TREE_REAL_CST (args->arg1)));
}
2001
2002 /* Given T, a tree representing type conversion of ARG1, a constant,
2003 return a constant tree representing the result of conversion. */
2004
2005 static tree
2006 fold_convert (t, arg1)
2007 register tree t;
2008 register tree arg1;
2009 {
2010 register tree type = TREE_TYPE (t);
2011 int overflow = 0;
2012
2013 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2014 {
2015 if (TREE_CODE (arg1) == INTEGER_CST)
2016 {
2017 /* If we would build a constant wider than GCC supports,
2018 leave the conversion unfolded. */
2019 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
2020 return t;
2021
2022 /* If we are trying to make a sizetype for a small integer, use
2023 size_int to pick up cached types to reduce duplicate nodes. */
2024 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
2025 && !TREE_CONSTANT_OVERFLOW (arg1)
2026 && compare_tree_int (arg1, 10000) < 0)
2027 return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);
2028
2029 /* Given an integer constant, make new constant with new type,
2030 appropriately sign-extended or truncated. */
2031 t = build_int_2 (TREE_INT_CST_LOW (arg1),
2032 TREE_INT_CST_HIGH (arg1));
2033 TREE_TYPE (t) = type;
2034 /* Indicate an overflow if (1) ARG1 already overflowed,
2035 or (2) force_fit_type indicates an overflow.
2036 Tell force_fit_type that an overflow has already occurred
2037 if ARG1 is a too-large unsigned value and T is signed.
2038 But don't indicate an overflow if converting a pointer. */
2039 TREE_OVERFLOW (t)
2040 = ((force_fit_type (t,
2041 (TREE_INT_CST_HIGH (arg1) < 0
2042 && (TREE_UNSIGNED (type)
2043 < TREE_UNSIGNED (TREE_TYPE (arg1)))))
2044 && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
2045 || TREE_OVERFLOW (arg1));
2046 TREE_CONSTANT_OVERFLOW (t)
2047 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2048 }
2049 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
2050 else if (TREE_CODE (arg1) == REAL_CST)
2051 {
2052 /* Don't initialize these; use assignments.
2053 Initialized local aggregates don't work on old compilers. */
2054 REAL_VALUE_TYPE x;
2055 REAL_VALUE_TYPE l;
2056 REAL_VALUE_TYPE u;
2057 tree type1 = TREE_TYPE (arg1);
2058 int no_upper_bound;
2059
2060 x = TREE_REAL_CST (arg1);
2061 l = real_value_from_int_cst (type1, TYPE_MIN_VALUE (type));
2062
2063 no_upper_bound = (TYPE_MAX_VALUE (type) == NULL);
2064 if (!no_upper_bound)
2065 u = real_value_from_int_cst (type1, TYPE_MAX_VALUE (type));
2066
2067 /* See if X will be in range after truncation towards 0.
2068 To compensate for truncation, move the bounds away from 0,
2069 but reject if X exactly equals the adjusted bounds. */
2070 #ifdef REAL_ARITHMETIC
2071 REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
2072 if (!no_upper_bound)
2073 REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
2074 #else
2075 l--;
2076 if (!no_upper_bound)
2077 u++;
2078 #endif
2079 /* If X is a NaN, use zero instead and show we have an overflow.
2080 Otherwise, range check. */
2081 if (REAL_VALUE_ISNAN (x))
2082 overflow = 1, x = dconst0;
2083 else if (! (REAL_VALUES_LESS (l, x)
2084 && !no_upper_bound
2085 && REAL_VALUES_LESS (x, u)))
2086 overflow = 1;
2087
2088 #ifndef REAL_ARITHMETIC
2089 {
2090 HOST_WIDE_INT low, high;
2091 HOST_WIDE_INT half_word
2092 = (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2);
2093
2094 if (x < 0)
2095 x = -x;
2096
2097 high = (HOST_WIDE_INT) (x / half_word / half_word);
2098 x -= (REAL_VALUE_TYPE) high * half_word * half_word;
2099 if (x >= (REAL_VALUE_TYPE) half_word * half_word / 2)
2100 {
2101 low = x - (REAL_VALUE_TYPE) half_word * half_word / 2;
2102 low |= (HOST_WIDE_INT) -1 << (HOST_BITS_PER_WIDE_INT - 1);
2103 }
2104 else
2105 low = (HOST_WIDE_INT) x;
2106 if (TREE_REAL_CST (arg1) < 0)
2107 neg_double (low, high, &low, &high);
2108 t = build_int_2 (low, high);
2109 }
2110 #else
2111 {
2112 HOST_WIDE_INT low, high;
2113 REAL_VALUE_TO_INT (&low, &high, x);
2114 t = build_int_2 (low, high);
2115 }
2116 #endif
2117 TREE_TYPE (t) = type;
2118 TREE_OVERFLOW (t)
2119 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
2120 TREE_CONSTANT_OVERFLOW (t)
2121 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2122 }
2123 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
2124 TREE_TYPE (t) = type;
2125 }
2126 else if (TREE_CODE (type) == REAL_TYPE)
2127 {
2128 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
2129 if (TREE_CODE (arg1) == INTEGER_CST)
2130 return build_real_from_int_cst (type, arg1);
2131 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
2132 if (TREE_CODE (arg1) == REAL_CST)
2133 {
2134 struct fc_args args;
2135
2136 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
2137 {
2138 t = arg1;
2139 TREE_TYPE (arg1) = type;
2140 return t;
2141 }
2142
2143 /* Set up input for fold_convert_1. */
2144 args.arg1 = arg1;
2145 args.type = type;
2146
2147 if (do_float_handler (fold_convert_1, (PTR) &args))
2148 {
2149 /* Receive output from fold_convert_1() */
2150 t = args.t;
2151 }
2152 else
2153 {
2154 /* We got an exception from fold_convert_1() */
2155 overflow = 1;
2156 t = copy_node (arg1);
2157 }
2158
2159 TREE_OVERFLOW (t)
2160 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
2161 TREE_CONSTANT_OVERFLOW (t)
2162 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2163 return t;
2164 }
2165 }
2166 TREE_CONSTANT (t) = 1;
2167 return t;
2168 }
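
/* Illustrative behavior (a sketch, assuming a 32-bit signed int target):

	(int) 3.9	folds to 3	(truncation towards zero)
	(int) -3.9	folds to -3
	(int) 1e20	folds with TREE_OVERFLOW set (out of range)
	(int) NaN	folds to 0 with TREE_OVERFLOW set

   mirroring the bound adjustment and NaN handling coded above.  */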
2169 \f
2170 /* Return an expr equal to X but certainly not valid as an lvalue. */
2171
2172 tree
2173 non_lvalue (x)
2174 tree x;
2175 {
2176 tree result;
2177
2178 /* These things are certainly not lvalues. */
2179 if (TREE_CODE (x) == NON_LVALUE_EXPR
2180 || TREE_CODE (x) == INTEGER_CST
2181 || TREE_CODE (x) == REAL_CST
2182 || TREE_CODE (x) == STRING_CST
2183 || TREE_CODE (x) == ADDR_EXPR)
2184 return x;
2185
2186 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2187 TREE_CONSTANT (result) = TREE_CONSTANT (x);
2188 return result;
2189 }
2190
2191 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2192 Zero means allow extended lvalues. */
2193
2194 int pedantic_lvalues;
2195
2196 /* When pedantic, return an expr equal to X but certainly not valid as a
2197 pedantic lvalue. Otherwise, return X. */
2198
2199 tree
2200 pedantic_non_lvalue (x)
2201 tree x;
2202 {
2203 if (pedantic_lvalues)
2204 return non_lvalue (x);
2205 else
2206 return x;
2207 }
2208 \f
2209 /* Given a tree comparison code, return the code that is the logical inverse
2210 of the given code. It is not safe to do this for floating-point
2211 comparisons, except for NE_EXPR and EQ_EXPR. */
2212
2213 static enum tree_code
2214 invert_tree_comparison (code)
2215 enum tree_code code;
2216 {
2217 switch (code)
2218 {
2219 case EQ_EXPR:
2220 return NE_EXPR;
2221 case NE_EXPR:
2222 return EQ_EXPR;
2223 case GT_EXPR:
2224 return LE_EXPR;
2225 case GE_EXPR:
2226 return LT_EXPR;
2227 case LT_EXPR:
2228 return GE_EXPR;
2229 case LE_EXPR:
2230 return GT_EXPR;
2231 default:
2232 abort ();
2233 }
2234 }
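
/* Why this is unsafe for floating-point orderings: with IEEE arithmetic
   a NaN operand makes the inverted comparison differ from the negation,
   as in this source-level sketch (hypothetical values):

	double a = 0.0 / 0.0;		a is a NaN
	! (a < 1.0)	evaluates to 1
	a >= 1.0	evaluates to 0

   so only NE_EXPR and EQ_EXPR may be inverted for floats.  */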
2235
2236 /* Similar, but return the comparison that results if the operands are
2237 swapped. This is safe for floating-point. */
2238
2239 static enum tree_code
2240 swap_tree_comparison (code)
2241 enum tree_code code;
2242 {
2243 switch (code)
2244 {
2245 case EQ_EXPR:
2246 case NE_EXPR:
2247 return code;
2248 case GT_EXPR:
2249 return LT_EXPR;
2250 case GE_EXPR:
2251 return LE_EXPR;
2252 case LT_EXPR:
2253 return GT_EXPR;
2254 case LE_EXPR:
2255 return GE_EXPR;
2256 default:
2257 abort ();
2258 }
2259 }
2260
2261 /* Return nonzero if CODE is a tree code that represents a truth value. */
2262
2263 static int
2264 truth_value_p (code)
2265 enum tree_code code;
2266 {
2267 return (TREE_CODE_CLASS (code) == '<'
2268 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2269 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2270 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2271 }
2272 \f
2273 /* Return nonzero if two operands are necessarily equal.
2274 If ONLY_CONST is non-zero, only return non-zero for constants.
2275 This function tests whether the operands are indistinguishable;
2276 it does not test whether they are equal using C's == operation.
2277 The distinction is important for IEEE floating point, because
2278 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2279 (2) two NaNs may be indistinguishable, but NaN!=NaN. */
2280
2281 int
2282 operand_equal_p (arg0, arg1, only_const)
2283 tree arg0, arg1;
2284 int only_const;
2285 {
2286 /* If both types don't have the same signedness, then we can't consider
2287 them equal. We must check this before the STRIP_NOPS calls
2288 because they may change the signedness of the arguments. */
2289 if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
2290 return 0;
2291
2292 STRIP_NOPS (arg0);
2293 STRIP_NOPS (arg1);
2294
2295 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2296 /* This is needed for conversions and for COMPONENT_REF.
2297 Might as well play it safe and always test this. */
2298 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2299 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2300 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2301 return 0;
2302
2303 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2304 We don't care about side effects in that case because the SAVE_EXPR
2305 takes care of that for us. In all other cases, two expressions are
2306 equal if they have no side effects. If we have two identical
2307 expressions with side effects that should be treated the same due
2308 to the only side effects being identical SAVE_EXPR's, that will
2309 be detected in the recursive calls below. */
2310 if (arg0 == arg1 && ! only_const
2311 && (TREE_CODE (arg0) == SAVE_EXPR
2312 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2313 return 1;
2314
2315 /* Next handle constant cases, those for which we can return 1 even
2316 if ONLY_CONST is set. */
2317 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2318 switch (TREE_CODE (arg0))
2319 {
2320 case INTEGER_CST:
2321 return (! TREE_CONSTANT_OVERFLOW (arg0)
2322 && ! TREE_CONSTANT_OVERFLOW (arg1)
2323 && tree_int_cst_equal (arg0, arg1));
2324
2325 case REAL_CST:
2326 return (! TREE_CONSTANT_OVERFLOW (arg0)
2327 && ! TREE_CONSTANT_OVERFLOW (arg1)
2328 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2329 TREE_REAL_CST (arg1)));
2330
2331 case COMPLEX_CST:
2332 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2333 only_const)
2334 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2335 only_const));
2336
2337 case STRING_CST:
2338 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2339 && ! memcmp (TREE_STRING_POINTER (arg0),
2340 TREE_STRING_POINTER (arg1),
2341 TREE_STRING_LENGTH (arg0)));
2342
2343 case ADDR_EXPR:
2344 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2345 0);
2346 default:
2347 break;
2348 }
2349
2350 if (only_const)
2351 return 0;
2352
2353 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2354 {
2355 case '1':
2356 /* Two conversions are equal only if signedness and modes match. */
2357 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
2358 && (TREE_UNSIGNED (TREE_TYPE (arg0))
2359 != TREE_UNSIGNED (TREE_TYPE (arg1))))
2360 return 0;
2361
2362 return operand_equal_p (TREE_OPERAND (arg0, 0),
2363 TREE_OPERAND (arg1, 0), 0);
2364
2365 case '<':
2366 case '2':
2367 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
2368 && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
2369 0))
2370 return 1;
2371
2372 /* For commutative ops, allow the other order. */
2373 return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
2374 || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
2375 || TREE_CODE (arg0) == BIT_IOR_EXPR
2376 || TREE_CODE (arg0) == BIT_XOR_EXPR
2377 || TREE_CODE (arg0) == BIT_AND_EXPR
2378 || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
2379 && operand_equal_p (TREE_OPERAND (arg0, 0),
2380 TREE_OPERAND (arg1, 1), 0)
2381 && operand_equal_p (TREE_OPERAND (arg0, 1),
2382 TREE_OPERAND (arg1, 0), 0));
2383
2384 case 'r':
2385 /* If either of the pointer (or reference) expressions we are
2386 dereferencing contains a side effect, they cannot be equal. */
2387 if (TREE_SIDE_EFFECTS (arg0)
2388 || TREE_SIDE_EFFECTS (arg1))
2389 return 0;
2390
2391 switch (TREE_CODE (arg0))
2392 {
2393 case INDIRECT_REF:
2394 return operand_equal_p (TREE_OPERAND (arg0, 0),
2395 TREE_OPERAND (arg1, 0), 0);
2396
2397 case COMPONENT_REF:
2398 case ARRAY_REF:
2399 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2400 TREE_OPERAND (arg1, 0), 0)
2401 && operand_equal_p (TREE_OPERAND (arg0, 1),
2402 TREE_OPERAND (arg1, 1), 0));
2403
2404 case BIT_FIELD_REF:
2405 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2406 TREE_OPERAND (arg1, 0), 0)
2407 && operand_equal_p (TREE_OPERAND (arg0, 1),
2408 TREE_OPERAND (arg1, 1), 0)
2409 && operand_equal_p (TREE_OPERAND (arg0, 2),
2410 TREE_OPERAND (arg1, 2), 0));
2411 default:
2412 return 0;
2413 }
2414
2415 case 'e':
2416 if (TREE_CODE (arg0) == RTL_EXPR)
2417 return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
2418 return 0;
2419
2420 default:
2421 return 0;
2422 }
2423 }
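
/* Source-level sketch of the IEEE distinction noted above (assuming IEEE
   floating point): -0.0 == 0.0 holds, yet the values are distinguishable,

	1.0 / 0.0	==> +Inf
	1.0 / -0.0	==> -Inf

   which is why REAL_CST operands are compared with REAL_VALUES_IDENTICAL
   rather than with a numeric equality test.  */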
2424 \f
2425 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2426 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2427
2428 When in doubt, return 0. */
2429
2430 static int
2431 operand_equal_for_comparison_p (arg0, arg1, other)
2432 tree arg0, arg1;
2433 tree other;
2434 {
2435 int unsignedp1, unsignedpo;
2436 tree primarg0, primarg1, primother;
2437 unsigned int correct_width;
2438
2439 if (operand_equal_p (arg0, arg1, 0))
2440 return 1;
2441
2442 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2443 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2444 return 0;
2445
2446 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2447 and see if the inner values are the same. This removes any
2448 signedness comparison, which doesn't matter here. */
2449 primarg0 = arg0, primarg1 = arg1;
2450 STRIP_NOPS (primarg0);
2451 STRIP_NOPS (primarg1);
2452 if (operand_equal_p (primarg0, primarg1, 0))
2453 return 1;
2454
2455 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2456 actual comparison operand, ARG0.
2457
2458 First throw away any conversions to wider types
2459 already present in the operands. */
2460
2461 primarg1 = get_narrower (arg1, &unsignedp1);
2462 primother = get_narrower (other, &unsignedpo);
2463
2464 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2465 if (unsignedp1 == unsignedpo
2466 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2467 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2468 {
2469 tree type = TREE_TYPE (arg0);
2470
2471 /* Make sure shorter operand is extended the right way
2472 to match the longer operand. */
2473 primarg1 = convert (signed_or_unsigned_type (unsignedp1,
2474 TREE_TYPE (primarg1)),
2475 primarg1);
2476
2477 if (operand_equal_p (arg0, convert (type, primarg1), 0))
2478 return 1;
2479 }
2480
2481 return 0;
2482 }
2483 \f
2484 /* See if ARG is an expression that is either a comparison or is performing
2485 arithmetic on comparisons. The comparisons must only be comparing
2486 two different values, which will be stored in *CVAL1 and *CVAL2; if
2487 they are non-zero it means that some operands have already been found.
2488 No variables may be used anywhere else in the expression except in the
2489 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2490 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2491
2492 If this is true, return 1. Otherwise, return zero. */
2493
2494 static int
2495 twoval_comparison_p (arg, cval1, cval2, save_p)
2496 tree arg;
2497 tree *cval1, *cval2;
2498 int *save_p;
2499 {
2500 enum tree_code code = TREE_CODE (arg);
2501 char class = TREE_CODE_CLASS (code);
2502
2503 /* We can handle some of the 'e' cases here. */
2504 if (class == 'e' && code == TRUTH_NOT_EXPR)
2505 class = '1';
2506 else if (class == 'e'
2507 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2508 || code == COMPOUND_EXPR))
2509 class = '2';
2510
2511 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
2512 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2513 {
2514 /* If we've already found a CVAL1 or CVAL2, this expression is
2515 too complex to handle. */
2516 if (*cval1 || *cval2)
2517 return 0;
2518
2519 class = '1';
2520 *save_p = 1;
2521 }
2522
2523 switch (class)
2524 {
2525 case '1':
2526 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2527
2528 case '2':
2529 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2530 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2531 cval1, cval2, save_p));
2532
2533 case 'c':
2534 return 1;
2535
2536 case 'e':
2537 if (code == COND_EXPR)
2538 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2539 cval1, cval2, save_p)
2540 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2541 cval1, cval2, save_p)
2542 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2543 cval1, cval2, save_p));
2544 return 0;
2545
2546 case '<':
2547 /* First see if we can handle the first operand, then the second. For
2548 the second operand, we know *CVAL1 can't be zero. It must be that
2549 one side of the comparison is each of the values; test for the
2550 case where this isn't true by failing if the two operands
2551 are the same. */
2552
2553 if (operand_equal_p (TREE_OPERAND (arg, 0),
2554 TREE_OPERAND (arg, 1), 0))
2555 return 0;
2556
2557 if (*cval1 == 0)
2558 *cval1 = TREE_OPERAND (arg, 0);
2559 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2560 ;
2561 else if (*cval2 == 0)
2562 *cval2 = TREE_OPERAND (arg, 0);
2563 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2564 ;
2565 else
2566 return 0;
2567
2568 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2569 ;
2570 else if (*cval2 == 0)
2571 *cval2 = TREE_OPERAND (arg, 1);
2572 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2573 ;
2574 else
2575 return 0;
2576
2577 return 1;
2578
2579 default:
2580 return 0;
2581 }
2582 }
2583 \f
2584 /* ARG is a tree that is known to contain just arithmetic operations and
2585 comparisons. Evaluate the operations in the tree substituting NEW0 for
2586 any occurrence of OLD0 as an operand of a comparison and likewise for
2587 NEW1 and OLD1. */
2588
2589 static tree
2590 eval_subst (arg, old0, new0, old1, new1)
2591 tree arg;
2592 tree old0, new0, old1, new1;
2593 {
2594 tree type = TREE_TYPE (arg);
2595 enum tree_code code = TREE_CODE (arg);
2596 char class = TREE_CODE_CLASS (code);
2597
2598 /* We can handle some of the 'e' cases here. */
2599 if (class == 'e' && code == TRUTH_NOT_EXPR)
2600 class = '1';
2601 else if (class == 'e'
2602 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2603 class = '2';
2604
2605 switch (class)
2606 {
2607 case '1':
2608 return fold (build1 (code, type,
2609 eval_subst (TREE_OPERAND (arg, 0),
2610 old0, new0, old1, new1)));
2611
2612 case '2':
2613 return fold (build (code, type,
2614 eval_subst (TREE_OPERAND (arg, 0),
2615 old0, new0, old1, new1),
2616 eval_subst (TREE_OPERAND (arg, 1),
2617 old0, new0, old1, new1)));
2618
2619 case 'e':
2620 switch (code)
2621 {
2622 case SAVE_EXPR:
2623 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2624
2625 case COMPOUND_EXPR:
2626 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2627
2628 case COND_EXPR:
2629 return fold (build (code, type,
2630 eval_subst (TREE_OPERAND (arg, 0),
2631 old0, new0, old1, new1),
2632 eval_subst (TREE_OPERAND (arg, 1),
2633 old0, new0, old1, new1),
2634 eval_subst (TREE_OPERAND (arg, 2),
2635 old0, new0, old1, new1)));
2636 default:
2637 break;
2638 }
2639 /* fall through - ??? */
2640
2641 case '<':
2642 {
2643 tree arg0 = TREE_OPERAND (arg, 0);
2644 tree arg1 = TREE_OPERAND (arg, 1);
2645
2646 /* We need to check both for exact equality and tree equality. The
2647 former will be true if the operand has a side-effect. In that
2648 case, we know the operand occurred exactly once. */
2649
2650 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2651 arg0 = new0;
2652 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2653 arg0 = new1;
2654
2655 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2656 arg1 = new0;
2657 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2658 arg1 = new1;
2659
2660 return fold (build (code, type, arg0, arg1));
2661 }
2662
2663 default:
2664 return arg;
2665 }
2666 }
2667 \f
2668 /* Return a tree for the case when the result of an expression is RESULT
2669 converted to TYPE and OMITTED was previously an operand of the expression
2670 but is now not needed (e.g., we folded OMITTED * 0).
2671
2672 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2673 the conversion of RESULT to TYPE. */
2674
2675 static tree
2676 omit_one_operand (type, result, omitted)
2677 tree type, result, omitted;
2678 {
2679 tree t = convert (type, result);
2680
2681 if (TREE_SIDE_EFFECTS (omitted))
2682 return build (COMPOUND_EXPR, type, omitted, t);
2683
2684 return non_lvalue (t);
2685 }
2686
2687 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2688
2689 static tree
2690 pedantic_omit_one_operand (type, result, omitted)
2691 tree type, result, omitted;
2692 {
2693 tree t = convert (type, result);
2694
2695 if (TREE_SIDE_EFFECTS (omitted))
2696 return build (COMPOUND_EXPR, type, omitted, t);
2697
2698 return pedantic_non_lvalue (t);
2699 }
2700 \f
2701 /* Return a simplified tree node for the truth-negation of ARG. This
2702 never alters ARG itself. We assume that ARG is an operation that
2703 returns a truth value (0 or 1). */
2704
2705 tree
2706 invert_truthvalue (arg)
2707 tree arg;
2708 {
2709 tree type = TREE_TYPE (arg);
2710 enum tree_code code = TREE_CODE (arg);
2711
2712 if (code == ERROR_MARK)
2713 return arg;
2714
2715 /* If this is a comparison, we can simply invert it, except for
2716 floating-point non-equality comparisons, in which case we just
2717 enclose a TRUTH_NOT_EXPR around what we have. */
2718
2719 if (TREE_CODE_CLASS (code) == '<')
2720 {
2721 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2722 && !flag_unsafe_math_optimizations
2723 && code != NE_EXPR
2724 && code != EQ_EXPR)
2725 return build1 (TRUTH_NOT_EXPR, type, arg);
2726 else
2727 return build (invert_tree_comparison (code), type,
2728 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2729 }
2730
2731 switch (code)
2732 {
2733 case INTEGER_CST:
2734 return convert (type, build_int_2 (integer_zerop (arg), 0));
2735
2736 case TRUTH_AND_EXPR:
2737 return build (TRUTH_OR_EXPR, type,
2738 invert_truthvalue (TREE_OPERAND (arg, 0)),
2739 invert_truthvalue (TREE_OPERAND (arg, 1)));
2740
2741 case TRUTH_OR_EXPR:
2742 return build (TRUTH_AND_EXPR, type,
2743 invert_truthvalue (TREE_OPERAND (arg, 0)),
2744 invert_truthvalue (TREE_OPERAND (arg, 1)));
2745
2746 case TRUTH_XOR_EXPR:
2747 /* Here we can invert either operand. We invert the first operand
2748 unless the second operand is a TRUTH_NOT_EXPR in which case our
2749 result is the XOR of the first operand with the inside of the
2750 negation of the second operand. */
2751
2752 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2753 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2754 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2755 else
2756 return build (TRUTH_XOR_EXPR, type,
2757 invert_truthvalue (TREE_OPERAND (arg, 0)),
2758 TREE_OPERAND (arg, 1));
2759
2760 case TRUTH_ANDIF_EXPR:
2761 return build (TRUTH_ORIF_EXPR, type,
2762 invert_truthvalue (TREE_OPERAND (arg, 0)),
2763 invert_truthvalue (TREE_OPERAND (arg, 1)));
2764
2765 case TRUTH_ORIF_EXPR:
2766 return build (TRUTH_ANDIF_EXPR, type,
2767 invert_truthvalue (TREE_OPERAND (arg, 0)),
2768 invert_truthvalue (TREE_OPERAND (arg, 1)));
2769
2770 case TRUTH_NOT_EXPR:
2771 return TREE_OPERAND (arg, 0);
2772
2773 case COND_EXPR:
2774 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2775 invert_truthvalue (TREE_OPERAND (arg, 1)),
2776 invert_truthvalue (TREE_OPERAND (arg, 2)));
2777
2778 case COMPOUND_EXPR:
2779 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2780 invert_truthvalue (TREE_OPERAND (arg, 1)));
2781
2782 case WITH_RECORD_EXPR:
2783 return build (WITH_RECORD_EXPR, type,
2784 invert_truthvalue (TREE_OPERAND (arg, 0)),
2785 TREE_OPERAND (arg, 1));
2786
2787 case NON_LVALUE_EXPR:
2788 return invert_truthvalue (TREE_OPERAND (arg, 0));
2789
2790 case NOP_EXPR:
2791 case CONVERT_EXPR:
2792 case FLOAT_EXPR:
2793 return build1 (TREE_CODE (arg), type,
2794 invert_truthvalue (TREE_OPERAND (arg, 0)));
2795
2796 case BIT_AND_EXPR:
2797 if (!integer_onep (TREE_OPERAND (arg, 1)))
2798 break;
2799 return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));
2800
2801 case SAVE_EXPR:
2802 return build1 (TRUTH_NOT_EXPR, type, arg);
2803
2804 case CLEANUP_POINT_EXPR:
2805 return build1 (CLEANUP_POINT_EXPR, type,
2806 invert_truthvalue (TREE_OPERAND (arg, 0)));
2807
2808 default:
2809 break;
2810 }
2811 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2812 abort ();
2813 return build1 (TRUTH_NOT_EXPR, type, arg);
2814 }
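
/* Illustrative rewrites performed above, in source form (hypothetical
   operands A, B, C and X):

	! (a && b)	becomes  ! a || ! b
	! (a ? b : c)	becomes  a ? ! b : ! c
	! (x & 1)	becomes  (x & 1) == 0

   Comparisons are inverted directly through invert_tree_comparison,
   except floating-point orderings, which keep a TRUTH_NOT_EXPR.  */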
2815
2816 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2817 operands are another bit-wise operation with a common input. If so,
2818 distribute the bit operations to save an operation and possibly two if
2819 constants are involved. For example, convert
2820 (A | B) & (A | C) into A | (B & C)
2821 Further simplification will occur if B and C are constants.
2822
2823 If this optimization cannot be done, 0 will be returned. */
2824
2825 static tree
2826 distribute_bit_expr (code, type, arg0, arg1)
2827 enum tree_code code;
2828 tree type;
2829 tree arg0, arg1;
2830 {
2831 tree common;
2832 tree left, right;
2833
2834 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2835 || TREE_CODE (arg0) == code
2836 || (TREE_CODE (arg0) != BIT_AND_EXPR
2837 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2838 return 0;
2839
2840 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2841 {
2842 common = TREE_OPERAND (arg0, 0);
2843 left = TREE_OPERAND (arg0, 1);
2844 right = TREE_OPERAND (arg1, 1);
2845 }
2846 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2847 {
2848 common = TREE_OPERAND (arg0, 0);
2849 left = TREE_OPERAND (arg0, 1);
2850 right = TREE_OPERAND (arg1, 0);
2851 }
2852 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2853 {
2854 common = TREE_OPERAND (arg0, 1);
2855 left = TREE_OPERAND (arg0, 0);
2856 right = TREE_OPERAND (arg1, 1);
2857 }
2858 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2859 {
2860 common = TREE_OPERAND (arg0, 1);
2861 left = TREE_OPERAND (arg0, 0);
2862 right = TREE_OPERAND (arg1, 0);
2863 }
2864 else
2865 return 0;
2866
2867 return fold (build (TREE_CODE (arg0), type, common,
2868 fold (build (code, type, left, right))));
2869 }
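
/* Worked instance of the distribution above (hypothetical operands):

	(x | 3) & (x | 5)
	  ==> x | (3 & 5)
	  ==> x | 1

   saving one bit operation; the inner fold collapses the two constants
   into one.  */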
2870 \f
2871 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2872 starting at BITPOS. The field is unsigned if UNSIGNEDP is non-zero. */
2873
2874 static tree
2875 make_bit_field_ref (inner, type, bitsize, bitpos, unsignedp)
2876 tree inner;
2877 tree type;
2878 int bitsize, bitpos;
2879 int unsignedp;
2880 {
2881 tree result = build (BIT_FIELD_REF, type, inner,
2882 size_int (bitsize), bitsize_int (bitpos));
2883
2884 TREE_UNSIGNED (result) = unsignedp;
2885
2886 return result;
2887 }
2888
2889 /* Optimize a bit-field compare.
2890
2891 There are two cases: the first is a compare against a constant, and the
2892 second is a comparison of two items where the fields are at the same
2893 bit position relative to the start of a chunk (byte, halfword, word)
2894 large enough to contain it. In these cases we can avoid the shift
2895 implicit in bitfield extractions.
2896
2897 For constants, we emit a compare of the shifted constant with the
2898 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2899 compared. For two fields at the same position, we do the ANDs with the
2900 similar mask and compare the result of the ANDs.
2901
2902 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2903 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2904 are the left and right operands of the comparison, respectively.
2905
2906 If the optimization described above can be done, we return the resulting
2907 tree. Otherwise we return zero. */
2908
2909 static tree
2910 optimize_bit_field_compare (code, compare_type, lhs, rhs)
2911 enum tree_code code;
2912 tree compare_type;
2913 tree lhs, rhs;
2914 {
2915 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
2916 tree type = TREE_TYPE (lhs);
2917 tree signed_type, unsigned_type;
2918 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2919 enum machine_mode lmode, rmode, nmode;
2920 int lunsignedp, runsignedp;
2921 int lvolatilep = 0, rvolatilep = 0;
2922 unsigned int alignment;
2923 tree linner, rinner = NULL_TREE;
2924 tree mask;
2925 tree offset;
2926
2927 /* Get all the information about the extractions being done. If the bit size
2928 is the same as the size of the underlying object, we aren't doing an
2929 extraction at all and so can do nothing. We also don't want to
2930 do anything if the inner expression is a PLACEHOLDER_EXPR since we
2931 then will no longer be able to replace it. */
2932 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2933 &lunsignedp, &lvolatilep, &alignment);
2934 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2935 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
2936 return 0;
2937
2938 if (!const_p)
2939 {
2940 /* If this is not a constant, we can only do something if bit positions,
2941 sizes, and signedness are the same. */
2942 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
2943 &runsignedp, &rvolatilep, &alignment);
2944
2945 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2946 || lunsignedp != runsignedp || offset != 0
2947 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
2948 return 0;
2949 }
2950
2951 /* See if we can find a mode to refer to this field. We should be able to,
2952 but fail if we can't. */
2953 nmode = get_best_mode (lbitsize, lbitpos,
2954 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
2955 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
2956 TYPE_ALIGN (TREE_TYPE (rinner))),
2957 word_mode, lvolatilep || rvolatilep);
2958 if (nmode == VOIDmode)
2959 return 0;
2960
2961 /* Set signed and unsigned types of the precision of this mode for the
2962 shifts below. */
2963 signed_type = type_for_mode (nmode, 0);
2964 unsigned_type = type_for_mode (nmode, 1);
2965
2966 /* Compute the bit position and size for the new reference and our offset
2967 within it. If the new reference is the same size as the original, we
2968 won't optimize anything, so return zero. */
2969 nbitsize = GET_MODE_BITSIZE (nmode);
2970 nbitpos = lbitpos & ~ (nbitsize - 1);
2971 lbitpos -= nbitpos;
2972 if (nbitsize == lbitsize)
2973 return 0;
2974
2975 if (BYTES_BIG_ENDIAN)
2976 lbitpos = nbitsize - lbitsize - lbitpos;
2977
2978 /* Make the mask to be used against the extracted field. */
2979 mask = build_int_2 (~0, ~0);
2980 TREE_TYPE (mask) = unsigned_type;
2981 force_fit_type (mask, 0);
2982 mask = convert (unsigned_type, mask);
2983 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
2984 mask = const_binop (RSHIFT_EXPR, mask,
2985 size_int (nbitsize - lbitsize - lbitpos), 0);
2986
2987 if (! const_p)
2988 /* If not comparing with constant, just rework the comparison
2989 and return. */
2990 return build (code, compare_type,
2991 build (BIT_AND_EXPR, unsigned_type,
2992 make_bit_field_ref (linner, unsigned_type,
2993 nbitsize, nbitpos, 1),
2994 mask),
2995 build (BIT_AND_EXPR, unsigned_type,
2996 make_bit_field_ref (rinner, unsigned_type,
2997 nbitsize, nbitpos, 1),
2998 mask));
2999
3000 /* Otherwise, we are handling the constant case. See if the constant is too
3001 big for the field. Warn and return a tree for 0 (false) if so. We do
3002 this not only for its own sake, but to avoid having to test for this
3003 error case below. If we didn't, we might generate wrong code.
3004
3005 For unsigned fields, the constant shifted right by the field length should
3006 be all zero. For signed fields, the high-order bits should agree with
3007 the sign bit. */
3008
3009 if (lunsignedp)
3010 {
3011 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3012 convert (unsigned_type, rhs),
3013 size_int (lbitsize), 0)))
3014 {
3015 warning ("comparison is always %d due to width of bitfield",
3016 code == NE_EXPR);
3017 return convert (compare_type,
3018 (code == NE_EXPR
3019 ? integer_one_node : integer_zero_node));
3020 }
3021 }
3022 else
3023 {
3024 tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
3025 size_int (lbitsize - 1), 0);
3026 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3027 {
3028 warning ("comparison is always %d due to width of bitfield",
3029 code == NE_EXPR);
3030 return convert (compare_type,
3031 (code == NE_EXPR
3032 ? integer_one_node : integer_zero_node));
3033 }
3034 }
3035
3036 /* Single-bit compares should always be against zero. */
3037 if (lbitsize == 1 && ! integer_zerop (rhs))
3038 {
3039 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3040 rhs = convert (type, integer_zero_node);
3041 }
3042
3043 /* Make a new bitfield reference, shift the constant over the
3044 appropriate number of bits and mask it with the computed mask
3045 (in case this was a signed field). If we changed it, make a new one. */
3046 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3047 if (lvolatilep)
3048 {
3049 TREE_SIDE_EFFECTS (lhs) = 1;
3050 TREE_THIS_VOLATILE (lhs) = 1;
3051 }
3052
3053 rhs = fold (const_binop (BIT_AND_EXPR,
3054 const_binop (LSHIFT_EXPR,
3055 convert (unsigned_type, rhs),
3056 size_int (lbitpos), 0),
3057 mask, 0));
3058
3059 return build (code, compare_type,
3060 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
3061 rhs);
3062 }
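
/* Illustrative sketch (hypothetical layout, little-endian target): for

	struct s { unsigned a : 3; unsigned b : 5; } *p;

   a test such as p->b == 7 need not extract B with shifts.  With B in
   bits 3..7 of a byte-sized chunk, the comparison becomes roughly

	(*(unsigned char *) p & 0xf8) == 7 << 3

   i.e. the containing unit masked and compared against the shifted
   constant.  */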
3063 \f
3064 /* Subroutine for fold_truthop: decode a field reference.
3065
3066 If EXP is a comparison reference, we return the innermost reference.
3067
3068 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3069 set to the starting bit number.
3070
3071 If the innermost field can be completely contained in a mode-sized
3072 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3073
3074 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3075 otherwise it is not changed.
3076
3077 *PUNSIGNEDP is set to the signedness of the field.
3078
3079 *PMASK is set to the mask used. This is either contained in a
3080 BIT_AND_EXPR or derived from the width of the field.
3081
3082 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3083
3084 Return 0 if this is not a component reference or is one that we can't
3085 do anything with. */
3086
3087 static tree
3088 decode_field_reference (exp, pbitsize, pbitpos, pmode, punsignedp,
3089 pvolatilep, pmask, pand_mask)
3090 tree exp;
3091 HOST_WIDE_INT *pbitsize, *pbitpos;
3092 enum machine_mode *pmode;
3093 int *punsignedp, *pvolatilep;
3094 tree *pmask;
3095 tree *pand_mask;
3096 {
3097 tree and_mask = 0;
3098 tree mask, inner, offset;
3099 tree unsigned_type;
3100 unsigned int precision;
3101 unsigned int alignment;
3102
3103 /* All the optimizations using this function assume integer fields.
3104 There are problems with FP fields since the type_for_size call
3105 below can fail for, e.g., XFmode. */
3106 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3107 return 0;
3108
3109 STRIP_NOPS (exp);
3110
3111 if (TREE_CODE (exp) == BIT_AND_EXPR)
3112 {
3113 and_mask = TREE_OPERAND (exp, 1);
3114 exp = TREE_OPERAND (exp, 0);
3115 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3116 if (TREE_CODE (and_mask) != INTEGER_CST)
3117 return 0;
3118 }
3119
3120 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3121 punsignedp, pvolatilep, &alignment);
3122 if ((inner == exp && and_mask == 0)
3123 || *pbitsize < 0 || offset != 0
3124 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3125 return 0;
3126
3127 /* Compute the mask to access the bitfield. */
3128 unsigned_type = type_for_size (*pbitsize, 1);
3129 precision = TYPE_PRECISION (unsigned_type);
3130
3131 mask = build_int_2 (~0, ~0);
3132 TREE_TYPE (mask) = unsigned_type;
3133 force_fit_type (mask, 0);
3134 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3135 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3136
3137 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3138 if (and_mask != 0)
3139 mask = fold (build (BIT_AND_EXPR, unsigned_type,
3140 convert (unsigned_type, and_mask), mask));
3141
3142 *pmask = mask;
3143 *pand_mask = and_mask;
3144 return inner;
3145 }
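
/* Mask construction sketch (hypothetical 5-bit field): type_for_size
   yields an 8-bit unsigned type here, so starting from all ones the
   two shifts leave exactly *PBITSIZE low-order bits set,

	~0		= 0xff
	<< (8 - 5)	= 0xf8
	>> (8 - 5)	= 0x1f

   and that mask is then ANDed with any mask from a BIT_AND_EXPR.  */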
3146
3147 /* Return non-zero if MASK represents a mask of SIZE ones in the low-order
3148 bit positions. */
3149
3150 static int
3151 all_ones_mask_p (mask, size)
3152 tree mask;
3153 int size;
3154 {
3155 tree type = TREE_TYPE (mask);
3156 unsigned int precision = TYPE_PRECISION (type);
3157 tree tmask;
3158
3159 tmask = build_int_2 (~0, ~0);
3160 TREE_TYPE (tmask) = signed_type (type);
3161 force_fit_type (tmask, 0);
3162 return
3163 tree_int_cst_equal (mask,
3164 const_binop (RSHIFT_EXPR,
3165 const_binop (LSHIFT_EXPR, tmask,
3166 size_int (precision - size),
3167 0),
3168 size_int (precision - size), 0));
3169 }
3170
3171 /* Subroutine for fold_truthop: determine if an operand is simple enough
3172 to be evaluated unconditionally. */
3173
3174 static int
3175 simple_operand_p (exp)
3176 tree exp;
3177 {
3178 /* Strip any conversions that don't change the machine mode. */
3179 while ((TREE_CODE (exp) == NOP_EXPR
3180 || TREE_CODE (exp) == CONVERT_EXPR)
3181 && (TYPE_MODE (TREE_TYPE (exp))
3182 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
3183 exp = TREE_OPERAND (exp, 0);
3184
3185 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
3186 || (DECL_P (exp)
3187 && ! TREE_ADDRESSABLE (exp)
3188 && ! TREE_THIS_VOLATILE (exp)
3189 && ! DECL_NONLOCAL (exp)
3190 /* Don't regard global variables as simple. They may be
3191 allocated in ways unknown to the compiler (shared memory,
3192 #pragma weak, etc). */
3193 && ! TREE_PUBLIC (exp)
3194 && ! DECL_EXTERNAL (exp)
3195 /* Loading a static variable is unduly expensive, but global
3196 registers aren't expensive. */
3197 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3198 }
3199 \f
3200 /* The following functions are subroutines to fold_range_test and allow it to
3201 try to change a logical combination of comparisons into a range test.
3202
3203 For example, both
3204 X == 2 || X == 3 || X == 4 || X == 5
3205 and
3206 X >= 2 && X <= 5
3207 are converted to
3208 (unsigned) (X - 2) <= 3
3209
3210 We describe each set of comparisons as being either inside or outside
3211 a range, using a variable named like IN_P, and then describe the
3212 range with a lower and upper bound. If one of the bounds is omitted,
3213 it represents either the highest or lowest value of the type.
3214
3215 In the comments below, we represent a range by two numbers in brackets
3216 preceded by a "+" to designate being inside that range, or a "-" to
3217 designate being outside that range, so the condition can be inverted by
3218 flipping the prefix. An omitted bound is represented by a "-". For
3219 example, "- [-, 10]" means being outside the range starting at the lowest
3220 possible value and ending at 10, in other words, being greater than 10.
3221 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3222 always false.
3223
3224 We set up things so that the missing bounds are handled in a consistent
3225 manner so neither a missing bound nor "true" and "false" need to be
3226 handled using a special case. */
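
/* A compilable sketch of the equivalence quoted above (assuming 32-bit
   int; F and G are hypothetical):

	int f (int x) { return x == 2 || x == 3 || x == 4 || x == 5; }
	int g (int x) { return (unsigned) x - 2 <= 3; }

   F and G agree for every X: subtracting the low bound biases the range
   to start at zero, and the unsigned comparison then rejects values
   below the low bound as huge positive numbers.  */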
3227
3228 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3229 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3230 and UPPER1_P are nonzero if the respective argument is an upper bound
3231 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3232 must be specified for a comparison. ARG1 will be converted to ARG0's
3233 type if both are specified. */
3234
3235 static tree
3236 range_binop (code, type, arg0, upper0_p, arg1, upper1_p)
3237 enum tree_code code;
3238 tree type;
3239 tree arg0, arg1;
3240 int upper0_p, upper1_p;
3241 {
3242 tree tem;
3243 int result;
3244 int sgn0, sgn1;
3245
3246 /* If neither arg represents infinity, do the normal operation.
3247 Else, if not a comparison, return infinity. Else handle the special
3248 comparison rules. Note that most of the cases below won't occur, but
3249 are handled for consistency. */
3250
3251 if (arg0 != 0 && arg1 != 0)
3252 {
3253 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
3254 arg0, convert (TREE_TYPE (arg0), arg1)));
3255 STRIP_NOPS (tem);
3256 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3257 }
3258
3259 if (TREE_CODE_CLASS (code) != '<')
3260 return 0;
3261
3262 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3263 for neither. In real mathematics, we cannot assume open-ended ranges
3264 are the same. But this is computer arithmetic, where numbers are finite.
3265 We can therefore replace any unbounded bound by a value Z greater than
3266 any representable number. This permits us to treat unbounded ranges
3267 as equal. */
3268 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3269 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3270 switch (code)
3271 {
3272 case EQ_EXPR:
3273 result = sgn0 == sgn1;
3274 break;
3275 case NE_EXPR:
3276 result = sgn0 != sgn1;
3277 break;
3278 case LT_EXPR:
3279 result = sgn0 < sgn1;
3280 break;
3281 case LE_EXPR:
3282 result = sgn0 <= sgn1;
3283 break;
3284 case GT_EXPR:
3285 result = sgn0 > sgn1;
3286 break;
3287 case GE_EXPR:
3288 result = sgn0 >= sgn1;
3289 break;
3290 default:
3291 abort ();
3292 }
3293
3294 return convert (type, result ? integer_one_node : integer_zero_node);
3295 }
3296 \f
3297 /* Given EXP, a logical expression, set the range it is testing into
3298 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3299 actually being tested. *PLOW and *PHIGH will be made of the same type
3300 as the returned expression. If EXP is not a comparison, we will most
3301 likely not be returning a useful value and range. */
3302
3303 static tree
3304 make_range (exp, pin_p, plow, phigh)
3305 tree exp;
3306 int *pin_p;
3307 tree *plow, *phigh;
3308 {
3309 enum tree_code code;
3310 tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
3311 tree orig_type = NULL_TREE;
3312 int in_p, n_in_p;
3313 tree low, high, n_low, n_high;
3314
3315 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3316 and see if we can refine the range. Some of the cases below may not
3317 happen, but it doesn't seem worth worrying about this. We "continue"
3318 the outer loop when we've changed something; otherwise we "break"
3319 the switch, which will "break" the while. */
3320
3321 in_p = 0, low = high = convert (TREE_TYPE (exp), integer_zero_node);
3322
3323 while (1)
3324 {
3325 code = TREE_CODE (exp);
3326
3327 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3328 {
3329 arg0 = TREE_OPERAND (exp, 0);
3330 if (TREE_CODE_CLASS (code) == '<'
3331 || TREE_CODE_CLASS (code) == '1'
3332 || TREE_CODE_CLASS (code) == '2')
3333 type = TREE_TYPE (arg0);
3334 if (TREE_CODE_CLASS (code) == '2'
3335 || TREE_CODE_CLASS (code) == '<'
3336 || (TREE_CODE_CLASS (code) == 'e'
3337 && TREE_CODE_LENGTH (code) > 1))
3338 arg1 = TREE_OPERAND (exp, 1);
3339 }
3340
3341 /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
3342 lose a cast by accident. */
3343 if (type != NULL_TREE && orig_type == NULL_TREE)
3344 orig_type = type;
3345
3346 switch (code)
3347 {
3348 case TRUTH_NOT_EXPR:
3349 in_p = ! in_p, exp = arg0;
3350 continue;
3351
3352 case EQ_EXPR: case NE_EXPR:
3353 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3354 /* We can only do something if the range is testing for zero
3355 and if the second operand is an integer constant. Note that
3356 saying something is "in" the range we make is done by
3357 complementing IN_P since it will be set in the initial case of
3358 being not equal to zero; "out" is leaving it alone. */
3359 if (low == 0 || high == 0
3360 || ! integer_zerop (low) || ! integer_zerop (high)
3361 || TREE_CODE (arg1) != INTEGER_CST)
3362 break;
3363
3364 switch (code)
3365 {
3366 case NE_EXPR: /* - [c, c] */
3367 low = high = arg1;
3368 break;
3369 case EQ_EXPR: /* + [c, c] */
3370 in_p = ! in_p, low = high = arg1;
3371 break;
3372 case GT_EXPR: /* - [-, c] */
3373 low = 0, high = arg1;
3374 break;
3375 case GE_EXPR: /* + [c, -] */
3376 in_p = ! in_p, low = arg1, high = 0;
3377 break;
3378 case LT_EXPR: /* - [c, -] */
3379 low = arg1, high = 0;
3380 break;
3381 case LE_EXPR: /* + [-, c] */
3382 in_p = ! in_p, low = 0, high = arg1;
3383 break;
3384 default:
3385 abort ();
3386 }
3387
3388 exp = arg0;
3389
3390 /* If this is an unsigned comparison, we also know that EXP is
3391 greater than or equal to zero. We base the range tests we make
3392 on that fact, so we record it here so we can parse existing
3393 range tests. */
3394 if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
3395 {
3396 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
3397 1, convert (type, integer_zero_node),
3398 NULL_TREE))
3399 break;
3400
3401 in_p = n_in_p, low = n_low, high = n_high;
3402
3403 /* If the high bound is missing, but we
3404 have a low bound, reverse the range so
3405 it goes from zero to the low bound minus 1. */
3406 if (high == 0 && low)
3407 {
3408 in_p = ! in_p;
3409 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3410 integer_one_node, 0);
3411 low = convert (type, integer_zero_node);
3412 }
3413 }
3414 continue;
3415
3416 case NEGATE_EXPR:
3417 /* (-x) IN [a,b] -> x in [-b, -a] */
3418 n_low = range_binop (MINUS_EXPR, type,
3419 convert (type, integer_zero_node), 0, high, 1);
3420 n_high = range_binop (MINUS_EXPR, type,
3421 convert (type, integer_zero_node), 0, low, 0);
3422 low = n_low, high = n_high;
3423 exp = arg0;
3424 continue;
3425
3426 case BIT_NOT_EXPR:
3427 /* ~ X -> -X - 1 */
3428 exp = build (MINUS_EXPR, type, negate_expr (arg0),
3429 convert (type, integer_one_node));
3430 continue;
3431
3432 case PLUS_EXPR: case MINUS_EXPR:
3433 if (TREE_CODE (arg1) != INTEGER_CST)
3434 break;
3435
3436 /* If EXP is signed, any overflow in the computation is undefined,
3437 so we don't worry about it so long as our computations on
3438 the bounds don't overflow. For unsigned, overflow is defined
3439 and this is exactly the right thing. */
3440 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3441 type, low, 0, arg1, 0);
3442 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3443 type, high, 1, arg1, 0);
3444 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3445 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3446 break;
3447
3448 /* Check for an unsigned range which has wrapped around the maximum
3449 value thus making n_high < n_low, and normalize it. */
3450 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3451 {
3452 low = range_binop (PLUS_EXPR, type, n_high, 0,
3453 integer_one_node, 0);
3454 high = range_binop (MINUS_EXPR, type, n_low, 0,
3455 integer_one_node, 0);
3456
3457 /* If the range is of the form +/- [ x+1, x ], we won't
3458 be able to normalize it. But then, it represents the
3459 whole range or the empty set, so make it
3460 +/- [ -, - ]. */
3461 if (tree_int_cst_equal (n_low, low)
3462 && tree_int_cst_equal (n_high, high))
3463 low = high = 0;
3464 else
3465 in_p = ! in_p;
3466 }
3467 else
3468 low = n_low, high = n_high;
3469
3470 exp = arg0;
3471 continue;
3472
3473 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3474 if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
3475 break;
3476
3477 if (! INTEGRAL_TYPE_P (type)
3478 || (low != 0 && ! int_fits_type_p (low, type))
3479 || (high != 0 && ! int_fits_type_p (high, type)))
3480 break;
3481
3482 n_low = low, n_high = high;
3483
3484 if (n_low != 0)
3485 n_low = convert (type, n_low);
3486
3487 if (n_high != 0)
3488 n_high = convert (type, n_high);
3489
3490 /* If we're converting from an unsigned to a signed type,
3491 we will be doing the comparison as unsigned. The tests above
3492 have already verified that LOW and HIGH are both positive.
3493
3494 So we have to make sure that the original unsigned value will
3495 be interpreted as positive. */
3496 if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
3497 {
3498 tree equiv_type = type_for_mode (TYPE_MODE (type), 1);
3499 tree high_positive;
3500
3501 /* A range without an upper bound is, naturally, unbounded.
3502 Since convert would have cropped a very large value, use
3503 the max value for the destination type. */
3504 high_positive
3505 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3506 : TYPE_MAX_VALUE (type);
3507
3508 high_positive = fold (build (RSHIFT_EXPR, type,
3509 convert (type, high_positive),
3510 convert (type, integer_one_node)));
3511
3512 /* If the low bound is specified, "and" the range with the
3513 range for which the original unsigned value will be
3514 positive. */
3515 if (low != 0)
3516 {
3517 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3518 1, n_low, n_high,
3519 1, convert (type, integer_zero_node),
3520 high_positive))
3521 break;
3522
3523 in_p = (n_in_p == in_p);
3524 }
3525 else
3526 {
3527 /* Otherwise, "or" the range with the range of the input
3528 that will be interpreted as negative. */
3529 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3530 0, n_low, n_high,
3531 1, convert (type, integer_zero_node),
3532 high_positive))
3533 break;
3534
3535 in_p = (in_p != n_in_p);
3536 }
3537 }
3538
3539 exp = arg0;
3540 low = n_low, high = n_high;
3541 continue;
3542
3543 default:
3544 break;
3545 }
3546
3547 break;
3548 }
3549
3550 /* If EXP is a constant, we can evaluate whether this is true or false. */
3551 if (TREE_CODE (exp) == INTEGER_CST)
3552 {
3553 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3554 exp, 0, low, 0))
3555 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3556 exp, 1, high, 1)));
3557 low = high = 0;
3558 exp = 0;
3559 }
3560
3561 *pin_p = in_p, *plow = low, *phigh = high;
3562 return exp;
3563 }
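
/* Illustrative trace (hypothetical expression): for EXP = x + 2 > 7,

	GT_EXPR:	x + 2  in  - [-, 7]
	PLUS_EXPR:	x      in  - [-, 5]	(bounds shifted by the constant)

   so X is returned with *PIN_P = 0, *PLOW = 0 and *PHIGH = 5; being
   outside [-, 5] is exactly x > 5.  */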
3564 \f
3565 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3566 type, TYPE, return an expression to test if EXP is in (or out of, depending
3567 on IN_P) the range. */
3568
3569 static tree
3570 build_range_check (type, exp, in_p, low, high)
3571 tree type;
3572 tree exp;
3573 int in_p;
3574 tree low, high;
3575 {
3576 tree etype = TREE_TYPE (exp);
3577 tree utype, value;
3578
3579 if (! in_p
3580 && (0 != (value = build_range_check (type, exp, 1, low, high))))
3581 return invert_truthvalue (value);
3582
3583 else if (low == 0 && high == 0)
3584 return convert (type, integer_one_node);
3585
3586 else if (low == 0)
3587 return fold (build (LE_EXPR, type, exp, high));
3588
3589 else if (high == 0)
3590 return fold (build (GE_EXPR, type, exp, low));
3591
3592 else if (operand_equal_p (low, high, 0))
3593 return fold (build (EQ_EXPR, type, exp, low));
3594
3595 else if (TREE_UNSIGNED (etype) && integer_zerop (low))
3596 return build_range_check (type, exp, 1, 0, high);
3597
3598 else if (integer_zerop (low))
3599 {
3600 utype = unsigned_type (etype);
3601 return build_range_check (type, convert (utype, exp), 1, 0,
3602 convert (utype, high));
3603 }
3604
3605 else if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3606 && ! TREE_OVERFLOW (value))
3607 return build_range_check (type,
3608 fold (build (MINUS_EXPR, etype, exp, low)),
3609 1, convert (etype, integer_zero_node), value);
3610 else
3611 return 0;
3612 }
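
/* Illustrative trace of the recursion above (hypothetical bounds): a
   signed check "EXP in [3, 10]" reaches the last case and becomes

	(exp - 3) in [0, 7]		via the MINUS_EXPR rewrite
	(unsigned) (exp - 3) <= 7	via the zero-low-bound cases

   yielding the single comparison that fold_range_test is after.  */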
3613 \f
3614 /* Given two ranges, see if we can merge them into one. Return 1 if we
3615 can, 0 if we can't. Set the output range into the specified parameters. */
3616
3617 static int
3618 merge_ranges (pin_p, plow, phigh, in0_p, low0, high0, in1_p, low1, high1)
3619 int *pin_p;
3620 tree *plow, *phigh;
3621 int in0_p, in1_p;
3622 tree low0, high0, low1, high1;
3623 {
3624 int no_overlap;
3625 int subset;
3626 int temp;
3627 tree tem;
3628 int in_p;
3629 tree low, high;
3630 int lowequal = ((low0 == 0 && low1 == 0)
3631 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3632 low0, 0, low1, 0)));
3633 int highequal = ((high0 == 0 && high1 == 0)
3634 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3635 high0, 1, high1, 1)));
3636
3637 /* Make range 0 be the range that starts first, or ends last if they
3638 start at the same value. Swap them if that isn't the case. */
3639 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3640 low0, 0, low1, 0))
3641 || (lowequal
3642 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3643 high1, 1, high0, 1))))
3644 {
3645 temp = in0_p, in0_p = in1_p, in1_p = temp;
3646 tem = low0, low0 = low1, low1 = tem;
3647 tem = high0, high0 = high1, high1 = tem;
3648 }
3649
3650 /* Now flag two cases, whether the ranges are disjoint or whether the
3651 second range is totally subsumed in the first. Note that the tests
3652 below are simplified by the ones above. */
3653 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3654 high0, 1, low1, 0));
3655 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3656 high1, 1, high0, 1));
3657
3658 /* We now have four cases, depending on whether we are including or
3659 excluding the two ranges. */
3660 if (in0_p && in1_p)
3661 {
3662 /* If they don't overlap, the result is false. If the second range
3663 is a subset it is the result. Otherwise, the range is from the start
3664 of the second to the end of the first. */
3665 if (no_overlap)
3666 in_p = 0, low = high = 0;
3667 else if (subset)
3668 in_p = 1, low = low1, high = high1;
3669 else
3670 in_p = 1, low = low1, high = high0;
3671 }
3672
3673 else if (in0_p && ! in1_p)
3674 {
3675 /* If they don't overlap, the result is the first range. If they are
3676 equal, the result is false. If the second range is a subset of the
3677 first, and the ranges begin at the same place, we go from just after
3678 the end of the first range to the end of the second. If the second
3679 range is not a subset of the first, or if it is a subset and both
3680 ranges end at the same place, the range starts at the start of the
3681 first range and ends just before the second range.
3682 Otherwise, we can't describe this as a single range. */
3683 if (no_overlap)
3684 in_p = 1, low = low0, high = high0;
3685 else if (lowequal && highequal)
3686 in_p = 0, low = high = 0;
3687 else if (subset && lowequal)
3688 {
3689 in_p = 1, high = high0;
3690 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3691 integer_one_node, 0);
3692 }
3693 else if (! subset || highequal)
3694 {
3695 in_p = 1, low = low0;
3696 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3697 integer_one_node, 0);
3698 }
3699 else
3700 return 0;
3701 }
3702
3703 else if (! in0_p && in1_p)
3704 {
3705 /* If they don't overlap, the result is the second range. If the second
3706 is a subset of the first, the result is false. Otherwise,
3707 the range starts just after the first range and ends at the
3708 end of the second. */
3709 if (no_overlap)
3710 in_p = 1, low = low1, high = high1;
3711 else if (subset || highequal)
3712 in_p = 0, low = high = 0;
3713 else
3714 {
3715 in_p = 1, high = high1;
3716 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3717 integer_one_node, 0);
3718 }
3719 }
3720
3721 else
3722 {
3723 /* The case where we are excluding both ranges. Here the complex case
3724 is if they don't overlap. In that case, the only time we have a
3725 range is if they are adjacent. If the second is a subset of the
3726 first, the result is the first. Otherwise, the range to exclude
3727 starts at the beginning of the first range and ends at the end of the
3728 second. */
3729 if (no_overlap)
3730 {
3731 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3732 range_binop (PLUS_EXPR, NULL_TREE,
3733 high0, 1,
3734 integer_one_node, 1),
3735 1, low1, 0)))
3736 in_p = 0, low = low0, high = high1;
3737 else
3738 return 0;
3739 }
3740 else if (subset)
3741 in_p = 0, low = low0, high = high0;
3742 else
3743 in_p = 0, low = low0, high = high1;
3744 }
3745
3746 *pin_p = in_p, *plow = low, *phigh = high;
3747 return 1;
3748 }
3749 \f
3750 /* EXP is some logical combination of boolean tests. See if we can
3751 merge it into some range test. Return the new tree if so. */
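/* For example (an illustrative sketch; the exact tree depends on
   build_range_check and the types involved): "ch >= '0' && ch <= '9'"
   can be merged into the single range check
   "(unsigned) (ch - '0') <= 9", trading two branches for one.  */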
3752
3753 static tree
3754 fold_range_test (exp)
3755 tree exp;
3756 {
3757 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3758 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3759 int in0_p, in1_p, in_p;
3760 tree low0, low1, low, high0, high1, high;
3761 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3762 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3763 tree tem;
3764
3765 /* If this is an OR operation, invert both sides; we will invert
3766 again at the end. */
3767 if (or_op)
3768 in0_p = ! in0_p, in1_p = ! in1_p;
3769
3770 /* If both expressions are the same, if we can merge the ranges, and we
3771 can build the range test, return it or its inversion. If one of the
3772 ranges is always true or always false, consider it to be the same
3773 expression as the other. */
3774 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3775 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3776 in1_p, low1, high1)
3777 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3778 lhs != 0 ? lhs
3779 : rhs != 0 ? rhs : integer_zero_node,
3780 in_p, low, high))))
3781 return or_op ? invert_truthvalue (tem) : tem;
3782
3783 /* On machines where branches are expensive, if this is a
3784 short-circuited branch and the underlying object on both sides
3785 is the same, make a non-short-circuit operation. */
3786 else if (BRANCH_COST >= 2
3787 && lhs != 0 && rhs != 0
3788 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3789 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3790 && operand_equal_p (lhs, rhs, 0))
3791 {
3792 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3793 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
3794 which cases we can't do this. */
3795 if (simple_operand_p (lhs))
3796 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3797 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3798 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3799 TREE_OPERAND (exp, 1));
3800
3801 else if (global_bindings_p () == 0
3802 && ! contains_placeholder_p (lhs))
3803 {
3804 tree common = save_expr (lhs);
3805
3806 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3807 or_op ? ! in0_p : in0_p,
3808 low0, high0))
3809 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3810 or_op ? ! in1_p : in1_p,
3811 low1, high1))))
3812 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3813 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3814 TREE_TYPE (exp), lhs, rhs);
3815 }
3816 }
3817
3818 return 0;
3819 }
3820 \f
3821 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3822 bit value. Arrange things so the extra bits will be set to zero if and
3823 only if C is sign-extended to its full width. If MASK is nonzero,
3824 it is an INTEGER_CST that should be AND'ed with the extra bits. */
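/* A worked illustration (not part of the original comment): with a 32-bit
   mode and P == 8, the sign-extended constant C == 0xffffff80 is mapped
   below to 0x00000080, whose extra bits are zero, while the non-extended
   C == 0x00000080 is mapped to 0xffffff80, whose extra bits are set.  */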
3825
3826 static tree
3827 unextend (c, p, unsignedp, mask)
3828 tree c;
3829 int p;
3830 int unsignedp;
3831 tree mask;
3832 {
3833 tree type = TREE_TYPE (c);
3834 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3835 tree temp;
3836
3837 if (p == modesize || unsignedp)
3838 return c;
3839
3840 /* We work by getting just the sign bit into the low-order bit, then
3841 into the high-order bit, then sign-extend. We then XOR that value
3842 with C. */
3843 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3844 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3845
3846 /* We must use a signed type in order to get an arithmetic right shift.
3847 However, we must also avoid introducing accidental overflows, so that
3848 a subsequent call to integer_zerop will work. Hence we must
3849 do the type conversion here. At this point, the constant is either
3850 zero or one, and the conversion to a signed type can never overflow.
3851 We could get an overflow if this conversion is done anywhere else. */
3852 if (TREE_UNSIGNED (type))
3853 temp = convert (signed_type (type), temp);
3854
3855 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3856 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3857 if (mask != 0)
3858 temp = const_binop (BIT_AND_EXPR, temp, convert (TREE_TYPE (c), mask), 0);
3859 /* If necessary, convert the type back to match the type of C. */
3860 if (TREE_UNSIGNED (type))
3861 temp = convert (type, temp);
3862
3863 return convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
3864 }
3865 \f
3866 /* Find ways of folding logical expressions of LHS and RHS:
3867 Try to merge two comparisons to the same innermost item.
3868 Look for range tests like "ch >= '0' && ch <= '9'".
3869 Look for combinations of simple terms on machines with expensive branches
3870 and evaluate the RHS unconditionally.
3871
3872 For example, if we have p->a == 2 && p->b == 4 and we can make an
3873 object large enough to span both A and B, we can do this with a comparison
3874 against the object ANDed with a mask.
3875
3876 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3877 operations to do this with one comparison.
3878
3879 We check for both normal comparisons and the BIT_AND_EXPRs made by this
3880 function and the one above.
3881
3882 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3883 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3884
3885 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
3886 two operands.
3887
3888 We return the simplified tree or 0 if no optimization is possible. */
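/* An illustrative sketch (assuming two adjacent unsigned char fields A and
   B on a little-endian target): "p->a == 2 && p->b == 4" can become a
   single 16-bit load and compare, roughly
   "(*(unsigned short *) &p->a) == 0x0402"; the masks, shifts, and merged
   constant are computed below and are target dependent.  */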
3889
3890 static tree
3891 fold_truthop (code, truth_type, lhs, rhs)
3892 enum tree_code code;
3893 tree truth_type, lhs, rhs;
3894 {
3895 /* If this is the "or" of two comparisons, we can do something if
3896 the comparisons are NE_EXPR. If this is the "and", we can do something
3897 if the comparisons are EQ_EXPR. I.e.,
3898 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3899
3900 WANTED_CODE is this operation code. For single bit fields, we can
3901 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3902 comparison for one-bit fields. */
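  /* Illustration (not from the original comment): for the "and" case,
     "(x & 1) == 0 && (x & 2) == 0" merges into "(x & 3) == 0"; for the
     "or" case, "(x & 1) == 0 || (x & 2) == 0" uses the one-bit conversion
     described above and merges into "(x & 3) != 3".  */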
3903
3904 enum tree_code wanted_code;
3905 enum tree_code lcode, rcode;
3906 tree ll_arg, lr_arg, rl_arg, rr_arg;
3907 tree ll_inner, lr_inner, rl_inner, rr_inner;
3908 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3909 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3910 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3911 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3912 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3913 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3914 enum machine_mode lnmode, rnmode;
3915 tree ll_mask, lr_mask, rl_mask, rr_mask;
3916 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3917 tree l_const, r_const;
3918 tree lntype, rntype, result;
3919 int first_bit, end_bit;
3920 int volatilep;
3921
3922 /* Start by getting the comparison codes. Fail if anything is volatile.
3923 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3924 it were surrounded with a NE_EXPR. */
3925
3926 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3927 return 0;
3928
3929 lcode = TREE_CODE (lhs);
3930 rcode = TREE_CODE (rhs);
3931
3932 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3933 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3934
3935 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3936 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3937
3938 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3939 return 0;
3940
3941 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3942 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3943
3944 ll_arg = TREE_OPERAND (lhs, 0);
3945 lr_arg = TREE_OPERAND (lhs, 1);
3946 rl_arg = TREE_OPERAND (rhs, 0);
3947 rr_arg = TREE_OPERAND (rhs, 1);
3948
3949 /* If the RHS can be evaluated unconditionally and its operands are
3950 simple, it wins to evaluate the RHS unconditionally on machines
3951 with expensive branches. In this case, this isn't a comparison
3952 that can be merged. Avoid doing this if the RHS is a floating-point
3953 comparison since those can trap. */
3954
3955 if (BRANCH_COST >= 2
3956 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
3957 && simple_operand_p (rl_arg)
3958 && simple_operand_p (rr_arg))
3959 return build (code, truth_type, lhs, rhs);
3960
3961 /* See if the comparisons can be merged. Then get all the parameters for
3962 each side. */
3963
3964 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
3965 || (rcode != EQ_EXPR && rcode != NE_EXPR))
3966 return 0;
3967
3968 volatilep = 0;
3969 ll_inner = decode_field_reference (ll_arg,
3970 &ll_bitsize, &ll_bitpos, &ll_mode,
3971 &ll_unsignedp, &volatilep, &ll_mask,
3972 &ll_and_mask);
3973 lr_inner = decode_field_reference (lr_arg,
3974 &lr_bitsize, &lr_bitpos, &lr_mode,
3975 &lr_unsignedp, &volatilep, &lr_mask,
3976 &lr_and_mask);
3977 rl_inner = decode_field_reference (rl_arg,
3978 &rl_bitsize, &rl_bitpos, &rl_mode,
3979 &rl_unsignedp, &volatilep, &rl_mask,
3980 &rl_and_mask);
3981 rr_inner = decode_field_reference (rr_arg,
3982 &rr_bitsize, &rr_bitpos, &rr_mode,
3983 &rr_unsignedp, &volatilep, &rr_mask,
3984 &rr_and_mask);
3985
3986 /* The inner operation on the lhs of each
3987 comparison must be the same if we are to be able to do anything.
3988 Then see if we have constants. If not, the same must be true for
3989 the rhs's. */
3990 if (volatilep || ll_inner == 0 || rl_inner == 0
3991 || ! operand_equal_p (ll_inner, rl_inner, 0))
3992 return 0;
3993
3994 if (TREE_CODE (lr_arg) == INTEGER_CST
3995 && TREE_CODE (rr_arg) == INTEGER_CST)
3996 l_const = lr_arg, r_const = rr_arg;
3997 else if (lr_inner == 0 || rr_inner == 0
3998 || ! operand_equal_p (lr_inner, rr_inner, 0))
3999 return 0;
4000 else
4001 l_const = r_const = 0;
4002
4003 /* If either comparison code is not correct for our logical operation,
4004 fail. However, we can convert a one-bit comparison against zero into
4005 the opposite comparison against that bit being set in the field. */
4006
4007 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4008 if (lcode != wanted_code)
4009 {
4010 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4011 {
4012 /* Make the left operand unsigned, since we are only interested
4013 in the value of one bit. Otherwise we are doing the wrong
4014 thing below. */
4015 ll_unsignedp = 1;
4016 l_const = ll_mask;
4017 }
4018 else
4019 return 0;
4020 }
4021
4022 /* This is analogous to the code for l_const above. */
4023 if (rcode != wanted_code)
4024 {
4025 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4026 {
4027 rl_unsignedp = 1;
4028 r_const = rl_mask;
4029 }
4030 else
4031 return 0;
4032 }
4033
4034 /* See if we can find a mode that contains both fields being compared on
4035 the left. If we can't, fail. Otherwise, update all constants and masks
4036 to be relative to a field of that size. */
4037 first_bit = MIN (ll_bitpos, rl_bitpos);
4038 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4039 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4040 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4041 volatilep);
4042 if (lnmode == VOIDmode)
4043 return 0;
4044
4045 lnbitsize = GET_MODE_BITSIZE (lnmode);
4046 lnbitpos = first_bit & ~ (lnbitsize - 1);
4047 lntype = type_for_size (lnbitsize, 1);
4048 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4049
4050 if (BYTES_BIG_ENDIAN)
4051 {
4052 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4053 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4054 }
4055
4056 ll_mask = const_binop (LSHIFT_EXPR, convert (lntype, ll_mask),
4057 size_int (xll_bitpos), 0);
4058 rl_mask = const_binop (LSHIFT_EXPR, convert (lntype, rl_mask),
4059 size_int (xrl_bitpos), 0);
4060
4061 if (l_const)
4062 {
4063 l_const = convert (lntype, l_const);
4064 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4065 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4066 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4067 fold (build1 (BIT_NOT_EXPR,
4068 lntype, ll_mask)),
4069 0)))
4070 {
4071 warning ("comparison is always %d", wanted_code == NE_EXPR);
4072
4073 return convert (truth_type,
4074 wanted_code == NE_EXPR
4075 ? integer_one_node : integer_zero_node);
4076 }
4077 }
4078 if (r_const)
4079 {
4080 r_const = convert (lntype, r_const);
4081 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4082 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4083 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4084 fold (build1 (BIT_NOT_EXPR,
4085 lntype, rl_mask)),
4086 0)))
4087 {
4088 warning ("comparison is always %d", wanted_code == NE_EXPR);
4089
4090 return convert (truth_type,
4091 wanted_code == NE_EXPR
4092 ? integer_one_node : integer_zero_node);
4093 }
4094 }
4095
4096 /* If the right sides are not constant, do the same for them. Also,
4097 disallow this optimization if a size or signedness mismatch occurs
4098 between the left and right sides. */
4099 if (l_const == 0)
4100 {
4101 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4102 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4103 /* Make sure the two fields on the right
4104 correspond to the left without being swapped. */
4105 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4106 return 0;
4107
4108 first_bit = MIN (lr_bitpos, rr_bitpos);
4109 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4110 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4111 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4112 volatilep);
4113 if (rnmode == VOIDmode)
4114 return 0;
4115
4116 rnbitsize = GET_MODE_BITSIZE (rnmode);
4117 rnbitpos = first_bit & ~ (rnbitsize - 1);
4118 rntype = type_for_size (rnbitsize, 1);
4119 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4120
4121 if (BYTES_BIG_ENDIAN)
4122 {
4123 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4124 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4125 }
4126
4127 lr_mask = const_binop (LSHIFT_EXPR, convert (rntype, lr_mask),
4128 size_int (xlr_bitpos), 0);
4129 rr_mask = const_binop (LSHIFT_EXPR, convert (rntype, rr_mask),
4130 size_int (xrr_bitpos), 0);
4131
4132 /* Make a mask that corresponds to both fields being compared.
4133 Do this for both items being compared. If the operands are the
4134 same size and the bits being compared are in the same position
4135 then we can do this by masking both and comparing the masked
4136 results. */
4137 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4138 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4139 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4140 {
4141 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4142 ll_unsignedp || rl_unsignedp);
4143 if (! all_ones_mask_p (ll_mask, lnbitsize))
4144 lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
4145
4146 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4147 lr_unsignedp || rr_unsignedp);
4148 if (! all_ones_mask_p (lr_mask, rnbitsize))
4149 rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
4150
4151 return build (wanted_code, truth_type, lhs, rhs);
4152 }
4153
4154 /* There is still another way we can do something: If both pairs of
4155 fields being compared are adjacent, we may be able to make a wider
4156 field containing them both.
4157
4158 Note that we still must mask the lhs/rhs expressions. Furthermore,
4159 the mask must be shifted to account for the shift done by
4160 make_bit_field_ref. */
4161 if ((ll_bitsize + ll_bitpos == rl_bitpos
4162 && lr_bitsize + lr_bitpos == rr_bitpos)
4163 || (ll_bitpos == rl_bitpos + rl_bitsize
4164 && lr_bitpos == rr_bitpos + rr_bitsize))
4165 {
4166 tree type;
4167
4168 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4169 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4170 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4171 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4172
4173 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4174 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4175 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4176 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4177
4178 /* Convert to the smaller type before masking out unwanted bits. */
4179 type = lntype;
4180 if (lntype != rntype)
4181 {
4182 if (lnbitsize > rnbitsize)
4183 {
4184 lhs = convert (rntype, lhs);
4185 ll_mask = convert (rntype, ll_mask);
4186 type = rntype;
4187 }
4188 else if (lnbitsize < rnbitsize)
4189 {
4190 rhs = convert (lntype, rhs);
4191 lr_mask = convert (lntype, lr_mask);
4192 type = lntype;
4193 }
4194 }
4195
4196 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4197 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
4198
4199 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4200 rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
4201
4202 return build (wanted_code, truth_type, lhs, rhs);
4203 }
4204
4205 return 0;
4206 }
4207
4208 /* Handle the case of comparisons with constants. If there is something in
4209 common between the masks, those bits of the constants must be the same.
4210 If not, the condition is always false. Test for this to avoid generating
4211 incorrect code below. */
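  /* For instance (illustration only): in "(x & 3) == 1 && (x & 1) == 0"
     the two masks share bit 0 but the constants disagree there, so the
     whole expression folds to constant 0 and the second warning below
     fires.  */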
4212 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4213 if (! integer_zerop (result)
4214 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4215 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4216 {
4217 if (wanted_code == NE_EXPR)
4218 {
4219 warning ("`or' of unmatched not-equal tests is always 1");
4220 return convert (truth_type, integer_one_node);
4221 }
4222 else
4223 {
4224 warning ("`and' of mutually exclusive equal-tests is always 0");
4225 return convert (truth_type, integer_zero_node);
4226 }
4227 }
4228
4229 /* Construct the expression we will return. First get the component
4230 reference we will make. Unless the mask is all ones the width of
4231 that field, perform the mask operation. Then compare with the
4232 merged constant. */
4233 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4234 ll_unsignedp || rl_unsignedp);
4235
4236 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4237 if (! all_ones_mask_p (ll_mask, lnbitsize))
4238 result = build (BIT_AND_EXPR, lntype, result, ll_mask);
4239
4240 return build (wanted_code, truth_type, result,
4241 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4242 }
4243 \f
4244 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4245 constant. */
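/* For example (an illustrative sketch): "MAX (x, 10) > 5" is always true,
   so it folds to constant 1 while preserving any side effects of X, and
   "MIN (x, 3) == 2" becomes "x == 2".  */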
4246
4247 static tree
4248 optimize_minmax_comparison (t)
4249 tree t;
4250 {
4251 tree type = TREE_TYPE (t);
4252 tree arg0 = TREE_OPERAND (t, 0);
4253 enum tree_code op_code;
4254 tree comp_const = TREE_OPERAND (t, 1);
4255 tree minmax_const;
4256 int consts_equal, consts_lt;
4257 tree inner;
4258
4259 STRIP_SIGN_NOPS (arg0);
4260
4261 op_code = TREE_CODE (arg0);
4262 minmax_const = TREE_OPERAND (arg0, 1);
4263 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4264 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4265 inner = TREE_OPERAND (arg0, 0);
4266
4267 /* If something does not permit us to optimize, return the original tree. */
4268 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4269 || TREE_CODE (comp_const) != INTEGER_CST
4270 || TREE_CONSTANT_OVERFLOW (comp_const)
4271 || TREE_CODE (minmax_const) != INTEGER_CST
4272 || TREE_CONSTANT_OVERFLOW (minmax_const))
4273 return t;
4274
4275 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4276 and GT_EXPR, doing the rest with recursive calls using logical
4277 simplifications. */
4278 switch (TREE_CODE (t))
4279 {
4280 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4281 return
4282 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4283
4284 case GE_EXPR:
4285 return
4286 fold (build (TRUTH_ORIF_EXPR, type,
4287 optimize_minmax_comparison
4288 (build (EQ_EXPR, type, arg0, comp_const)),
4289 optimize_minmax_comparison
4290 (build (GT_EXPR, type, arg0, comp_const))));
4291
4292 case EQ_EXPR:
4293 if (op_code == MAX_EXPR && consts_equal)
4294 /* MAX (X, 0) == 0 -> X <= 0 */
4295 return fold (build (LE_EXPR, type, inner, comp_const));
4296
4297 else if (op_code == MAX_EXPR && consts_lt)
4298 /* MAX (X, 0) == 5 -> X == 5 */
4299 return fold (build (EQ_EXPR, type, inner, comp_const));
4300
4301 else if (op_code == MAX_EXPR)
4302 /* MAX (X, 0) == -1 -> false */
4303 return omit_one_operand (type, integer_zero_node, inner);
4304
4305 else if (consts_equal)
4306 /* MIN (X, 0) == 0 -> X >= 0 */
4307 return fold (build (GE_EXPR, type, inner, comp_const));
4308
4309 else if (consts_lt)
4310 /* MIN (X, 0) == 5 -> false */
4311 return omit_one_operand (type, integer_zero_node, inner);
4312
4313 else
4314 /* MIN (X, 0) == -1 -> X == -1 */
4315 return fold (build (EQ_EXPR, type, inner, comp_const));
4316
4317 case GT_EXPR:
4318 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4319 /* MAX (X, 0) > 0 -> X > 0
4320 MAX (X, 0) > 5 -> X > 5 */
4321 return fold (build (GT_EXPR, type, inner, comp_const));
4322
4323 else if (op_code == MAX_EXPR)
4324 /* MAX (X, 0) > -1 -> true */
4325 return omit_one_operand (type, integer_one_node, inner);
4326
4327 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4328 /* MIN (X, 0) > 0 -> false
4329 MIN (X, 0) > 5 -> false */
4330 return omit_one_operand (type, integer_zero_node, inner);
4331
4332 else
4333 /* MIN (X, 0) > -1 -> X > -1 */
4334 return fold (build (GT_EXPR, type, inner, comp_const));
4335
4336 default:
4337 return t;
4338 }
4339 }
4340 \f
4341 /* T is an integer expression that is being multiplied by, divided by, or
4342 taken modulo a constant C (CODE says which, and what kind of divide or
4343 modulus). See if we can eliminate that operation by folding it with
4344 other operations already in T. WIDE_TYPE, if non-null, is a type that
4345 should be used for the computation if wider than our type.
4346
4347 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4348 (X * 2) + (Y * 4). We must, however, be assured that either the original
4349 expression would not overflow or that overflow is undefined for the type
4350 in the language in question.
4351
4352 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4353 the machine has a multiply-accumulate insn or that this is part of an
4354 addressing calculation.
4355
4356 If we return a non-null expression, it is an equivalent form of the
4357 original computation, but need not be in the original type. */
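/* A small illustrative sketch (not from the original comment): given
   T = (X * 8) + (Y * 16), C = 4 and CODE = TRUNC_DIV_EXPR, the PLUS_EXPR
   case below recurses into both operands and rebuilds (X * 2) + (Y * 4),
   eliminating the division entirely.  */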
4358
4359 static tree
4360 extract_muldiv (t, c, code, wide_type)
4361 tree t;
4362 tree c;
4363 enum tree_code code;
4364 tree wide_type;
4365 {
4366 tree type = TREE_TYPE (t);
4367 enum tree_code tcode = TREE_CODE (t);
4368 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
4369 > GET_MODE_SIZE (TYPE_MODE (type)))
4370 ? wide_type : type);
4371 tree t1, t2;
4372 int same_p = tcode == code;
4373 tree op0 = NULL_TREE, op1 = NULL_TREE;
4374
4375 /* Don't deal with constants of zero here; they confuse the code below. */
4376 if (integer_zerop (c))
4377 return NULL_TREE;
4378
4379 if (TREE_CODE_CLASS (tcode) == '1')
4380 op0 = TREE_OPERAND (t, 0);
4381
4382 if (TREE_CODE_CLASS (tcode) == '2')
4383 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
4384
4385 /* Note that we need not handle conditional operations here since fold
4386 already handles those cases. So just do arithmetic here. */
4387 switch (tcode)
4388 {
4389 case INTEGER_CST:
4390 /* For a constant, we can always simplify if we are a multiply
4391 or (for divide and modulus) if it is a multiple of our constant. */
4392 if (code == MULT_EXPR
4393 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
4394 return const_binop (code, convert (ctype, t), convert (ctype, c), 0);
4395 break;
4396
4397 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
4398 /* If op0 is an expression that is unsigned, and its type is
4399 smaller than ctype, then we cannot widen the expression. */
4400 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
4401 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
4402 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
4403 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
4404 && TREE_UNSIGNED (TREE_TYPE (op0))
4405 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4406 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
4407 && (GET_MODE_SIZE (TYPE_MODE (ctype))
4408 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
4409 break;
4410
4411 /* Pass the constant down and see if we can make a simplification. If
4412 we can, replace this expression with the inner simplification for
4413 possible later conversion to our or some other type. */
4414 if (0 != (t1 = extract_muldiv (op0, convert (TREE_TYPE (op0), c), code,
4415 code == MULT_EXPR ? ctype : NULL_TREE)))
4416 return t1;
4417 break;
4418
4419 case NEGATE_EXPR: case ABS_EXPR:
4420 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4421 return fold (build1 (tcode, ctype, convert (ctype, t1)));
4422 break;
4423
4424 case MIN_EXPR: case MAX_EXPR:
4425 /* If widening the type changes the signedness, then we can't perform
4426 this optimization as that changes the result. */
4427 if (TREE_UNSIGNED (ctype) != TREE_UNSIGNED (type))
4428 break;
4429
4430 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
4431 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
4432 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
4433 {
4434 if (tree_int_cst_sgn (c) < 0)
4435 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
4436
4437 return fold (build (tcode, ctype, convert (ctype, t1),
4438 convert (ctype, t2)));
4439 }
4440 break;
4441
4442 case WITH_RECORD_EXPR:
4443 if ((t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code, wide_type)) != 0)
4444 return build (WITH_RECORD_EXPR, TREE_TYPE (t1), t1,
4445 TREE_OPERAND (t, 1));
4446 break;
4447
4448 case SAVE_EXPR:
4449 /* If this has not been evaluated and the operand has no side effects,
4450 we can see if we can do something inside it and make a new one.
4451 Note that this test is overly conservative since we can do this
4452 if the only reason it had side effects is that it was another
4453 similar SAVE_EXPR, but that isn't worth bothering with. */
4454 if (SAVE_EXPR_RTL (t) == 0 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0))
4455 && 0 != (t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code,
4456 wide_type)))
4457 {
4458 t1 = save_expr (t1);
4459 if (SAVE_EXPR_PERSISTENT_P (t) && TREE_CODE (t1) == SAVE_EXPR)
4460 SAVE_EXPR_PERSISTENT_P (t1) = 1;
4461 if (is_pending_size (t))
4462 put_pending_size (t1);
4463 return t1;
4464 }
4465 break;
4466
4467 case LSHIFT_EXPR: case RSHIFT_EXPR:
4468 /* If the second operand is constant, this is a multiplication
4469 or floor division by a power of two, so we can treat it that
4470 way unless the multiplier or divisor overflows. */
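      /* Illustration: (x << 3) / 4 is handled here as (x * 8) / 4, which
         the recursive call then simplifies to x * 2.  */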
4471 if (TREE_CODE (op1) == INTEGER_CST
4472 /* const_binop may not detect overflow correctly,
4473 so check for it explicitly here. */
4474 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
4475 && TREE_INT_CST_HIGH (op1) == 0
4476 && 0 != (t1 = convert (ctype,
4477 const_binop (LSHIFT_EXPR, size_one_node,
4478 op1, 0)))
4479 && ! TREE_OVERFLOW (t1))
4480 return extract_muldiv (build (tcode == LSHIFT_EXPR
4481 ? MULT_EXPR : FLOOR_DIV_EXPR,
4482 ctype, convert (ctype, op0), t1),
4483 c, code, wide_type);
4484 break;
4485
4486 case PLUS_EXPR: case MINUS_EXPR:
4487 /* See if we can eliminate the operation on both sides. If we can, we
4488 can return a new PLUS or MINUS. If we can't, the only remaining
4489 cases where we can do anything are when the second operand is a
4490 constant. */
4491 t1 = extract_muldiv (op0, c, code, wide_type);
4492 t2 = extract_muldiv (op1, c, code, wide_type);
4493 if (t1 != 0 && t2 != 0
4494 && (code == MULT_EXPR
4495 /* If not multiplication, we can only do this if either operand
4496 is divisible by c. */
4497 || multiple_of_p (ctype, op0, c)
4498 || multiple_of_p (ctype, op1, c)))
4499 return fold (build (tcode, ctype, convert (ctype, t1),
4500 convert (ctype, t2)));
4501
4502 /* If this was a subtraction, negate OP1 and set it to be an addition.
4503 This simplifies the logic below. */
4504 if (tcode == MINUS_EXPR)
4505 tcode = PLUS_EXPR, op1 = negate_expr (op1);
4506
4507 if (TREE_CODE (op1) != INTEGER_CST)
4508 break;
4509
4510 /* If either OP1 or C is negative, this optimization is not safe for
4511 some of the division and remainder types while for others we need
4512 to change the code. */
4513 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
4514 {
4515 if (code == CEIL_DIV_EXPR)
4516 code = FLOOR_DIV_EXPR;
4517 else if (code == CEIL_MOD_EXPR)
4518 code = FLOOR_MOD_EXPR;
4519 else if (code == FLOOR_DIV_EXPR)
4520 code = CEIL_DIV_EXPR;
4521 else if (code == FLOOR_MOD_EXPR)
4522 code = CEIL_MOD_EXPR;
4523 else if (code != MULT_EXPR)
4524 break;
4525 }
4526
4527 /* If it's a multiply or a division/modulus operation of a multiple
4528 of our constant, do the operation and verify it doesn't overflow. */
4529 if (code == MULT_EXPR
4530 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4531 {
4532 op1 = const_binop (code, convert (ctype, op1), convert (ctype, c), 0);
4533 if (op1 == 0 || TREE_OVERFLOW (op1))
4534 break;
4535 }
4536 else
4537 break;
4538
4539 /* If we have an unsigned type that is not a sizetype, we cannot widen
4540 the operation since it will change the result if the original
4541 computation overflowed. */
4542 if (TREE_UNSIGNED (ctype)
4543 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
4544 && ctype != type)
4545 break;
4546
4547 /* If we were able to eliminate our operation from the first side,
4548 apply our operation to the second side and reform the PLUS. */
4549 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4550 return fold (build (tcode, ctype, convert (ctype, t1), op1));
4551
4552 /* The last case is if we are a multiply. In that case, we can
4553 apply the distributive law to commute the multiply and addition
4554 if the multiplication of the constants doesn't overflow. */
4555 if (code == MULT_EXPR)
4556 return fold (build (tcode, ctype, fold (build (code, ctype,
4557 convert (ctype, op0),
4558 convert (ctype, c))),
4559 op1));
4560
4561 break;
4562
4563 case MULT_EXPR:
4564 /* We have a special case here if we are doing something like
4565 (C * 8) % 4 since we know that's zero. */
4566 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4567 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4568 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4569 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4570 return omit_one_operand (type, integer_zero_node, op0);
4571
4572 /* ... fall through ... */
4573
4574 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
4575 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
4576 /* If we can extract our operation from the LHS, do so and return a
4577 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
4578 do something only if the second operand is a constant. */
4579 if (same_p
4580 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4581 return fold (build (tcode, ctype, convert (ctype, t1),
4582 convert (ctype, op1)));
4583 else if (tcode == MULT_EXPR && code == MULT_EXPR
4584 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4585 return fold (build (tcode, ctype, convert (ctype, op0),
4586 convert (ctype, t1)));
4587 else if (TREE_CODE (op1) != INTEGER_CST)
4588 return 0;
4589
4590 /* If these are the same operation types, we can associate them
4591 assuming no overflow. */
4592 if (tcode == code
4593 && 0 != (t1 = const_binop (MULT_EXPR, convert (ctype, op1),
4594 convert (ctype, c), 0))
4595 && ! TREE_OVERFLOW (t1))
4596 return fold (build (tcode, ctype, convert (ctype, op0), t1));
4597
4598 /* If these operations "cancel" each other, we have the main
4599 optimizations of this pass, which occur when either constant is a
4600 multiple of the other, in which case we replace this with either an
4601 operation of CODE or TCODE.
4602
4603 If we have an unsigned type that is not a sizetype, we cannot do
4604 this since it will change the result if the original computation
4605 overflowed. */
4606 if ((! TREE_UNSIGNED (ctype)
4607 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
4608 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4609 || (tcode == MULT_EXPR
4610 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4611 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
4612 {
4613 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4614 return fold (build (tcode, ctype, convert (ctype, op0),
4615 convert (ctype,
4616 const_binop (TRUNC_DIV_EXPR,
4617 op1, c, 0))));
4618 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4619 return fold (build (code, ctype, convert (ctype, op0),
4620 convert (ctype,
4621 const_binop (TRUNC_DIV_EXPR,
4622 c, op1, 0))));
4623 }
4624 break;
4625
4626 default:
4627 break;
4628 }
4629
4630 return 0;
4631 }
4632 \f
4633 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4634 S, a SAVE_EXPR, return the expression actually being evaluated. Note
4635 that we may sometimes modify the tree. */
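/* For instance (illustration only): if folding produced
   "((void) SAVE_EXPR <a>, a + b)" merely so that the SAVE_EXPR would be
   evaluated, this returns the "a + b" part.  */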
4636
4637 static tree
4638 strip_compound_expr (t, s)
4639 tree t;
4640 tree s;
4641 {
4642 enum tree_code code = TREE_CODE (t);
4643
4644 /* See if this is the COMPOUND_EXPR we want to eliminate. */
4645 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4646 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4647 return TREE_OPERAND (t, 1);
4648
4649 /* See if this is a COND_EXPR or a simple arithmetic operator. We
4650 don't bother handling any other types. */
4651 else if (code == COND_EXPR)
4652 {
4653 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4654 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4655 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
4656 }
4657 else if (TREE_CODE_CLASS (code) == '1')
4658 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4659 else if (TREE_CODE_CLASS (code) == '<'
4660 || TREE_CODE_CLASS (code) == '2')
4661 {
4662 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4663 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4664 }
4665
4666 return t;
4667 }
4668 \f
4669 /* Return a node which has the indicated constant VALUE (either 0 or
4670 1), and is of the indicated TYPE. */
4671
4672 static tree
4673 constant_boolean_node (value, type)
4674 int value;
4675 tree type;
4676 {
4677 if (type == integer_type_node)
4678 return value ? integer_one_node : integer_zero_node;
4679 else if (TREE_CODE (type) == BOOLEAN_TYPE)
4680 return truthvalue_conversion (value ? integer_one_node :
4681 integer_zero_node);
4682 else
4683 {
4684 tree t = build_int_2 (value, 0);
4685
4686 TREE_TYPE (t) = type;
4687 return t;
4688 }
4689 }
4690
4691 /* Utility function for the following routine, to see how complex a nesting of
4692 COND_EXPRs can be. EXPR is the expression and LIMIT is a count beyond which
4693 we don't care (to avoid spending too much time on complex expressions). */
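/* For example: count_cond on "a ? (b ? x : y) : z" with a generous limit
   returns 2, one for each COND_EXPR node it finds.  */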
4694
4695 static int
4696 count_cond (expr, lim)
4697 tree expr;
4698 int lim;
4699 {
4700 int ctrue, cfalse;
4701
4702 if (TREE_CODE (expr) != COND_EXPR)
4703 return 0;
4704 else if (lim <= 0)
4705 return 0;
4706
4707 ctrue = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4708 cfalse = count_cond (TREE_OPERAND (expr, 2), lim - 1 - ctrue);
4709 return MIN (lim, 1 + ctrue + cfalse);
4710 }
4711
4712 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
4713 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
4714 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
4715 expression, and ARG to `a'. If COND_FIRST_P is non-zero, then the
4716 COND is the first argument to CODE; otherwise (as in the example
4717 given here), it is the second argument. TYPE is the type of the
4718 original expression. */
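/* So, for instance (an illustrative sketch): "a + (p ? 1 : 0)" is rebuilt
   here as "p ? a + 1 : a + 0", which fold can then simplify further; A is
   wrapped in a SAVE_EXPR below when it is complex enough that it must not
   be evaluated twice.  */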
4719
4720 static tree
4721 fold_binary_op_with_conditional_arg (code, type, cond, arg, cond_first_p)
4722 enum tree_code code;
4723 tree type;
4724 tree cond;
4725 tree arg;
4726 int cond_first_p;
4727 {
4728 tree test, true_value, false_value;
4729 tree lhs = NULL_TREE;
4730 tree rhs = NULL_TREE;
4731 /* In the end, we'll produce a COND_EXPR. Both arms of the
4732 conditional expression will be binary operations. The left-hand
4733 side of the expression to be executed if the condition is true
4734 will be pointed to by TRUE_LHS. Similarly, the right-hand side
4735 of the expression to be executed if the condition is true will be
4736 pointed to by TRUE_RHS. FALSE_LHS and FALSE_RHS are analogous --
4737 but apply to the expression to be executed if the conditional is
4738 false. */
4739 tree *true_lhs;
4740 tree *true_rhs;
4741 tree *false_lhs;
4742 tree *false_rhs;
4743 /* These are the codes to use for the left-hand side and right-hand
4744 side of the COND_EXPR. Normally, they are the same as CODE. */
4745 enum tree_code lhs_code = code;
4746 enum tree_code rhs_code = code;
4747 /* And these are the types of the expressions. */
4748 tree lhs_type = type;
4749 tree rhs_type = type;
4750
4751 if (cond_first_p)
4752 {
4753 true_rhs = false_rhs = &arg;
4754 true_lhs = &true_value;
4755 false_lhs = &false_value;
4756 }
4757 else
4758 {
4759 true_lhs = false_lhs = &arg;
4760 true_rhs = &true_value;
4761 false_rhs = &false_value;
4762 }
4763
4764 if (TREE_CODE (cond) == COND_EXPR)
4765 {
4766 test = TREE_OPERAND (cond, 0);
4767 true_value = TREE_OPERAND (cond, 1);
4768 false_value = TREE_OPERAND (cond, 2);
4769 /* If this operand is an expression that throws, it does not make
4770 sense to try to perform a logical or arithmetic operation
4771 involving it. Instead of building `a + throw 3' for example,
4772 we simply build `a, throw 3'. */
4773 if (VOID_TYPE_P (TREE_TYPE (true_value)))
4774 {
4775 lhs_code = COMPOUND_EXPR;
4776 if (!cond_first_p)
4777 lhs_type = void_type_node;
4778 }
4779 if (VOID_TYPE_P (TREE_TYPE (false_value)))
4780 {
4781 rhs_code = COMPOUND_EXPR;
4782 if (!cond_first_p)
4783 rhs_type = void_type_node;
4784 }
4785 }
4786 else
4787 {
4788 tree testtype = TREE_TYPE (cond);
4789 test = cond;
4790 true_value = convert (testtype, integer_one_node);
4791 false_value = convert (testtype, integer_zero_node);
4792 }
4793
4794 /* If ARG is complex we want to make sure we only evaluate
4795 it once. Though this is only required if it is volatile, it
4796 might be more efficient even if it is not. However, if we
4797 succeed in folding one part to a constant, we do not need
4798 to make this SAVE_EXPR. Since we do this optimization
4799 primarily to see if we do end up with a constant and this
4800 SAVE_EXPR interferes with later optimizations, suppressing
4801 it when we can is important.
4802
4803 If we are not in a function, we can't make a SAVE_EXPR, so don't
4804 try to do so. Don't try to see if the result is a constant
4805 if an arm is a COND_EXPR since we get exponential behavior
4806 in that case. */
4807
4808 if (TREE_CODE (arg) != SAVE_EXPR && ! TREE_CONSTANT (arg)
4809 && global_bindings_p () == 0
4810 && ((TREE_CODE (arg) != VAR_DECL
4811 && TREE_CODE (arg) != PARM_DECL)
4812 || TREE_SIDE_EFFECTS (arg)))
4813 {
4814 if (TREE_CODE (true_value) != COND_EXPR)
4815 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4816
4817 if (TREE_CODE (false_value) != COND_EXPR)
4818 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4819
4820 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4821 && (rhs == 0 || !TREE_CONSTANT (rhs)))
4822 arg = save_expr (arg), lhs = rhs = 0;
4823 }
4824
4825 if (lhs == 0)
4826 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4827 if (rhs == 0)
4828 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4829
4830 test = fold (build (COND_EXPR, type, test, lhs, rhs));
4831
4832 if (TREE_CODE (arg) == SAVE_EXPR)
4833 return build (COMPOUND_EXPR, type,
4834 convert (void_type_node, arg),
4835 strip_compound_expr (test, arg));
4836 else
4837 return convert (type, test);
4838 }
4839
4840 \f
4841 /* Perform constant folding and related simplification of EXPR.
4842 The related simplifications include x*1 => x, x*0 => 0, etc.,
4843 and application of the associative law.
4844 NOP_EXPR conversions may be removed freely (as long as we
4845 are careful not to change the C type of the overall expression).
4846 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
4847 but we can constant-fold them if they have constant operands. */
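/* Illustrative examples of the rewrites performed below (sketches, not an
   exhaustive list): "x + 0" and "x * 1" become "x", "- (a - b)" becomes
   "b - a" for integers, and a pair of conversions such as
   "(int) (long) i" collapses to "i" when i is an int.  */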
4848
4849 tree
4850 fold (expr)
4851 tree expr;
4852 {
4853 register tree t = expr;
4854 tree t1 = NULL_TREE;
4855 tree tem;
4856 tree type = TREE_TYPE (expr);
4857 register tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4858 register enum tree_code code = TREE_CODE (t);
4859 register int kind = TREE_CODE_CLASS (code);
4860 int invert;
4861 /* WINS will be nonzero when the switch is done
4862 if all operands are constant. */
4863 int wins = 1;
4864
4865 /* Don't try to process an RTL_EXPR since its operands aren't trees.
4866 Likewise for a SAVE_EXPR that's already been evaluated. */
4867 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
4868 return t;
4869
4870 /* Return right away if a constant. */
4871 if (kind == 'c')
4872 return t;
4873
4874 #ifdef MAX_INTEGER_COMPUTATION_MODE
4875 check_max_integer_computation_mode (expr);
4876 #endif
4877
4878 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
4879 {
4880 tree subop;
4881
4882 /* Special case for conversion ops that can have fixed point args. */
4883 arg0 = TREE_OPERAND (t, 0);
4884
4885 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
4886 if (arg0 != 0)
4887 STRIP_SIGN_NOPS (arg0);
4888
4889 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
4890 subop = TREE_REALPART (arg0);
4891 else
4892 subop = arg0;
4893
4894 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
4895 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4896 && TREE_CODE (subop) != REAL_CST
4897 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4898 )
4899 /* Note that TREE_CONSTANT isn't enough:
4900 static var addresses are constant but we can't
4901 do arithmetic on them. */
4902 wins = 0;
4903 }
4904 else if (IS_EXPR_CODE_CLASS (kind) || kind == 'r')
4905 {
4906 register int len = TREE_CODE_LENGTH (code);
4907 register int i;
4908 for (i = 0; i < len; i++)
4909 {
4910 tree op = TREE_OPERAND (t, i);
4911 tree subop;
4912
4913 if (op == 0)
4914 continue; /* Valid for CALL_EXPR, at least. */
4915
4916 if (kind == '<' || code == RSHIFT_EXPR)
4917 {
4918 /* Signedness matters here. Perhaps we can refine this
4919 later. */
4920 STRIP_SIGN_NOPS (op);
4921 }
4922 else
4923 /* Strip any conversions that don't change the mode. */
4924 STRIP_NOPS (op);
4925
4926 if (TREE_CODE (op) == COMPLEX_CST)
4927 subop = TREE_REALPART (op);
4928 else
4929 subop = op;
4930
4931 if (TREE_CODE (subop) != INTEGER_CST
4932 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4933 && TREE_CODE (subop) != REAL_CST
4934 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4935 )
4936 /* Note that TREE_CONSTANT isn't enough:
4937 static var addresses are constant but we can't
4938 do arithmetic on them. */
4939 wins = 0;
4940
4941 if (i == 0)
4942 arg0 = op;
4943 else if (i == 1)
4944 arg1 = op;
4945 }
4946 }
4947
4948 /* If this is a commutative operation, and ARG0 is a constant, move it
4949 to ARG1 to reduce the number of tests below. */
4950 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
4951 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
4952 || code == BIT_AND_EXPR)
4953 && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
4954 {
4955 tem = arg0; arg0 = arg1; arg1 = tem;
4956
4957 tem = TREE_OPERAND (t, 0); TREE_OPERAND (t, 0) = TREE_OPERAND (t, 1);
4958 TREE_OPERAND (t, 1) = tem;
4959 }
4960
4961 /* Now WINS is set as described above,
4962 ARG0 is the first operand of EXPR,
4963 and ARG1 is the second operand (if it has more than one operand).
4964
4965 First check for cases where an arithmetic operation is applied to a
4966 compound, conditional, or comparison operation. Push the arithmetic
4967 operation inside the compound or conditional to see if any folding
4968 can then be done. Convert comparison to conditional for this purpose.
4969 This also optimizes non-constant cases that used to be done in
4970 expand_expr.
4971
4972 Before we do that, see if this is a BIT_AND_EXPR, BIT_IOR_EXPR, EQ_EXPR
4973 or NE_EXPR where one of the operands is a truth value and the other is a
4974 truth value or a BIT_AND_EXPR with the constant 1. In that case, the
4975 code below would make the expression more complex. Change it to a
4976 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
4977 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
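  /* For example (illustration only): "(a < b) & (c < d)" becomes the
     TRUTH_AND_EXPR of the two comparisons, "(a < b) != (c < d)" becomes a
     TRUTH_XOR_EXPR, and "(a < b) == (c < d)" becomes its inversion.  */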
4978
4979 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
4980 || code == EQ_EXPR || code == NE_EXPR)
4981 && ((truth_value_p (TREE_CODE (arg0))
4982 && (truth_value_p (TREE_CODE (arg1))
4983 || (TREE_CODE (arg1) == BIT_AND_EXPR
4984 && integer_onep (TREE_OPERAND (arg1, 1)))))
4985 || (truth_value_p (TREE_CODE (arg1))
4986 && (truth_value_p (TREE_CODE (arg0))
4987 || (TREE_CODE (arg0) == BIT_AND_EXPR
4988 && integer_onep (TREE_OPERAND (arg0, 1)))))))
4989 {
4990 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
4991 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
4992 : TRUTH_XOR_EXPR,
4993 type, arg0, arg1));
4994
4995 if (code == EQ_EXPR)
4996 t = invert_truthvalue (t);
4997
4998 return t;
4999 }
5000
5001 if (TREE_CODE_CLASS (code) == '1')
5002 {
5003 if (TREE_CODE (arg0) == COMPOUND_EXPR)
5004 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5005 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
5006 else if (TREE_CODE (arg0) == COND_EXPR)
5007 {
5008 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
5009 fold (build1 (code, type, TREE_OPERAND (arg0, 1))),
5010 fold (build1 (code, type, TREE_OPERAND (arg0, 2)))));
5011
5012 /* If this was a conversion, and all we did was to move it
5013 inside the COND_EXPR, bring it back out. But leave it if
5014 it is a conversion from integer to integer and the
5015 result precision is no wider than a word since such a
5016 conversion is cheap and may be optimized away by combine,
5017 while it couldn't if it were outside the COND_EXPR. Then return
5018 so we don't get into an infinite recursion loop taking the
5019 conversion out and then back in. */
5020
5021 if ((code == NOP_EXPR || code == CONVERT_EXPR
5022 || code == NON_LVALUE_EXPR)
5023 && TREE_CODE (t) == COND_EXPR
5024 && TREE_CODE (TREE_OPERAND (t, 1)) == code
5025 && TREE_CODE (TREE_OPERAND (t, 2)) == code
5026 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
5027 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
5028 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
5029 && (INTEGRAL_TYPE_P
5030 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))))
5031 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
5032 t = build1 (code, type,
5033 build (COND_EXPR,
5034 TREE_TYPE (TREE_OPERAND
5035 (TREE_OPERAND (t, 1), 0)),
5036 TREE_OPERAND (t, 0),
5037 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
5038 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
5039 return t;
5040 }
5041 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
5042 return fold (build (COND_EXPR, type, arg0,
5043 fold (build1 (code, type, integer_one_node)),
5044 fold (build1 (code, type, integer_zero_node))));
5045 }
5046 else if (TREE_CODE_CLASS (code) == '2'
5047 || TREE_CODE_CLASS (code) == '<')
5048 {
5049 if (TREE_CODE (arg1) == COMPOUND_EXPR)
5050 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5051 fold (build (code, type,
5052 arg0, TREE_OPERAND (arg1, 1))));
5053 else if ((TREE_CODE (arg1) == COND_EXPR
5054 || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
5055 && TREE_CODE_CLASS (code) != '<'))
5056 && (TREE_CODE (arg0) != COND_EXPR
5057 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5058 && (! TREE_SIDE_EFFECTS (arg0)
5059 || (global_bindings_p () == 0
5060 && ! contains_placeholder_p (arg0))))
5061 return
5062 fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
5063 /*cond_first_p=*/0);
5064 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
5065 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5066 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5067 else if ((TREE_CODE (arg0) == COND_EXPR
5068 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
5069 && TREE_CODE_CLASS (code) != '<'))
5070 && (TREE_CODE (arg1) != COND_EXPR
5071 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5072 && (! TREE_SIDE_EFFECTS (arg1)
5073 || (global_bindings_p () == 0
5074 && ! contains_placeholder_p (arg1))))
5075 return
5076 fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
5077 /*cond_first_p=*/1);
5078 }
5079 else if (TREE_CODE_CLASS (code) == '<'
5080 && TREE_CODE (arg0) == COMPOUND_EXPR)
5081 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5082 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5083 else if (TREE_CODE_CLASS (code) == '<'
5084 && TREE_CODE (arg1) == COMPOUND_EXPR)
5085 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5086 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
5087
5088 switch (code)
5089 {
5090 case INTEGER_CST:
5091 case REAL_CST:
5092 case STRING_CST:
5093 case COMPLEX_CST:
5094 case CONSTRUCTOR:
5095 return t;
5096
5097 case CONST_DECL:
5098 return fold (DECL_INITIAL (t));
5099
5100 case NOP_EXPR:
5101 case FLOAT_EXPR:
5102 case CONVERT_EXPR:
5103 case FIX_TRUNC_EXPR:
5104 /* Other kinds of FIX are not handled properly by fold_convert. */
5105
5106 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
5107 return TREE_OPERAND (t, 0);
5108
5109 /* Handle cases of two conversions in a row. */
5110 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
5111 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
5112 {
5113 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5114 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
5115 tree final_type = TREE_TYPE (t);
5116 int inside_int = INTEGRAL_TYPE_P (inside_type);
5117 int inside_ptr = POINTER_TYPE_P (inside_type);
5118 int inside_float = FLOAT_TYPE_P (inside_type);
5119 unsigned int inside_prec = TYPE_PRECISION (inside_type);
5120 int inside_unsignedp = TREE_UNSIGNED (inside_type);
5121 int inter_int = INTEGRAL_TYPE_P (inter_type);
5122 int inter_ptr = POINTER_TYPE_P (inter_type);
5123 int inter_float = FLOAT_TYPE_P (inter_type);
5124 unsigned int inter_prec = TYPE_PRECISION (inter_type);
5125 int inter_unsignedp = TREE_UNSIGNED (inter_type);
5126 int final_int = INTEGRAL_TYPE_P (final_type);
5127 int final_ptr = POINTER_TYPE_P (final_type);
5128 int final_float = FLOAT_TYPE_P (final_type);
5129 unsigned int final_prec = TYPE_PRECISION (final_type);
5130 int final_unsignedp = TREE_UNSIGNED (final_type);
5131
5132 /* In addition to the cases of two conversions in a row
5133 handled below, if we are converting something to its own
5134 type via an object of identical or wider precision, neither
5135 conversion is needed. */
5136 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (final_type)
5137 && ((inter_int && final_int) || (inter_float && final_float))
5138 && inter_prec >= final_prec)
5139 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5140
5141 /* Likewise, if the intermediate and initial types are either both
5142 float or both integer, we don't need the middle conversion if
5143 it is wider than the initial type and doesn't change the signedness
5144 (for integers). Avoid this if the final type is a pointer
5145 since then we sometimes need the inner conversion. Likewise if
5146 the outer has a precision not equal to the size of its mode. */
5147 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
5148 || (inter_float && inside_float))
5149 && inter_prec >= inside_prec
5150 && (inter_float || inter_unsignedp == inside_unsignedp)
5151 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5152 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5153 && ! final_ptr)
5154 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5155
5156 /* If we have a sign-extension of a zero-extended value, we can
5157 replace that by a single zero-extension. */
5158 if (inside_int && inter_int && final_int
5159 && inside_prec < inter_prec && inter_prec < final_prec
5160 && inside_unsignedp && !inter_unsignedp)
5161 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
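          /* Illustration: for an int i, "(int) (short) (unsigned char) i"
             needs only the zero-extension and is rewritten as
             "(int) (unsigned char) i".  */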
5162
5163 /* Two conversions in a row are not needed unless:
5164 - some conversion is floating-point (overstrict for now), or
5165 - the intermediate type is narrower than both initial and
5166 final, or
5167 - the intermediate type and innermost type differ in signedness,
5168 and the outermost type is wider than the intermediate, or
5169 - the initial type is a pointer type and the precisions of the
5170 intermediate and final types differ, or
5171 - the final type is a pointer type and the precisions of the
5172 initial and intermediate types differ. */
5173 if (! inside_float && ! inter_float && ! final_float
5174 && (inter_prec > inside_prec || inter_prec > final_prec)
5175 && ! (inside_int && inter_int
5176 && inter_unsignedp != inside_unsignedp
5177 && inter_prec < final_prec)
5178 && ((inter_unsignedp && inter_prec > inside_prec)
5179 == (final_unsignedp && final_prec > inter_prec))
5180 && ! (inside_ptr && inter_prec != final_prec)
5181 && ! (final_ptr && inside_prec != inter_prec)
5182 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5183 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5184 && ! final_ptr)
5185 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5186 }
5187
5188 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
5189 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
5190 /* Detect assigning a bitfield. */
5191 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
5192 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
5193 {
5194 /* Don't leave an assignment inside a conversion
5195 unless assigning a bitfield. */
5196 tree prev = TREE_OPERAND (t, 0);
5197 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
5198 /* First do the assignment, then return converted constant. */
5199 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
5200 TREE_USED (t) = 1;
5201 return t;
5202 }
5203 if (!wins)
5204 {
5205 TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
5206 return t;
5207 }
5208 return fold_convert (t, arg0);
5209
5210 #if 0 /* This loses on &"foo"[0]. */
5211 case ARRAY_REF:
5212 {
5213 int i;
5214
5215 /* Fold an expression like: "foo"[2] */
5216 if (TREE_CODE (arg0) == STRING_CST
5217 && TREE_CODE (arg1) == INTEGER_CST
5218 && compare_tree_int (arg1, TREE_STRING_LENGTH (arg0)) < 0)
5219 {
5220 t = build_int_2 (TREE_STRING_POINTER (arg0)[TREE_INT_CST_LOW (arg1)], 0);
5221 TREE_TYPE (t) = TREE_TYPE (TREE_TYPE (arg0));
5222 force_fit_type (t, 0);
5223 }
5224 }
5225 return t;
5226 #endif /* 0 */
5227
5228 case COMPONENT_REF:
5229 if (TREE_CODE (arg0) == CONSTRUCTOR)
5230 {
5231 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
5232 if (m)
5233 t = TREE_VALUE (m);
5234 }
5235 return t;
5236
5237 case RANGE_EXPR:
5238 TREE_CONSTANT (t) = wins;
5239 return t;
5240
5241 case NEGATE_EXPR:
5242 if (wins)
5243 {
5244 if (TREE_CODE (arg0) == INTEGER_CST)
5245 {
5246 unsigned HOST_WIDE_INT low;
5247 HOST_WIDE_INT high;
5248 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5249 TREE_INT_CST_HIGH (arg0),
5250 &low, &high);
5251 t = build_int_2 (low, high);
5252 TREE_TYPE (t) = type;
5253 TREE_OVERFLOW (t)
5254 = (TREE_OVERFLOW (arg0)
5255 | force_fit_type (t, overflow && !TREE_UNSIGNED (type)));
5256 TREE_CONSTANT_OVERFLOW (t)
5257 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5258 }
5259 else if (TREE_CODE (arg0) == REAL_CST)
5260 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5261 }
5262 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5263 return TREE_OPERAND (arg0, 0);
5264
5265 /* Convert - (a - b) to (b - a) for non-floating-point. */
5266 else if (TREE_CODE (arg0) == MINUS_EXPR
5267 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
5268 return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
5269 TREE_OPERAND (arg0, 0));
5270
5271 return t;
5272
5273 case ABS_EXPR:
5274 if (wins)
5275 {
5276 if (TREE_CODE (arg0) == INTEGER_CST)
5277 {
5278 /* If the value is unsigned, then the absolute value is
5279 the same as the ordinary value. */
5280 if (TREE_UNSIGNED (type))
5281 return arg0;
5282 /* Similarly, if the value is non-negative. */
5283 else if (INT_CST_LT (integer_minus_one_node, arg0))
5284 return arg0;
5285 /* If the value is negative, then the absolute value is
5286 its negation. */
5287 else
5288 {
5289 unsigned HOST_WIDE_INT low;
5290 HOST_WIDE_INT high;
5291 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5292 TREE_INT_CST_HIGH (arg0),
5293 &low, &high);
5294 t = build_int_2 (low, high);
5295 TREE_TYPE (t) = type;
5296 TREE_OVERFLOW (t)
5297 = (TREE_OVERFLOW (arg0)
5298 | force_fit_type (t, overflow));
5299 TREE_CONSTANT_OVERFLOW (t)
5300 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5301 }
5302 }
5303 else if (TREE_CODE (arg0) == REAL_CST)
5304 {
5305 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
5306 t = build_real (type,
5307 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5308 }
5309 }
5310 else if (TREE_CODE (arg0) == ABS_EXPR || TREE_CODE (arg0) == NEGATE_EXPR)
5311 return build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
5312 return t;
5313
5314 case CONJ_EXPR:
5315 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5316 return convert (type, arg0);
5317 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5318 return build (COMPLEX_EXPR, type,
5319 TREE_OPERAND (arg0, 0),
5320 negate_expr (TREE_OPERAND (arg0, 1)));
5321 else if (TREE_CODE (arg0) == COMPLEX_CST)
5322 return build_complex (type, TREE_REALPART (arg0),
5323 negate_expr (TREE_IMAGPART (arg0)));
5324 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5325 return fold (build (TREE_CODE (arg0), type,
5326 fold (build1 (CONJ_EXPR, type,
5327 TREE_OPERAND (arg0, 0))),
5328 fold (build1 (CONJ_EXPR,
5329 type, TREE_OPERAND (arg0, 1)))));
5330 else if (TREE_CODE (arg0) == CONJ_EXPR)
5331 return TREE_OPERAND (arg0, 0);
5332 return t;
5333
5334 case BIT_NOT_EXPR:
5335 if (wins)
5336 {
5337 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
5338 ~ TREE_INT_CST_HIGH (arg0));
5339 TREE_TYPE (t) = type;
5340 force_fit_type (t, 0);
5341 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
5342 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
5343 }
5344 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
5345 return TREE_OPERAND (arg0, 0);
5346 return t;
5347
5348 case PLUS_EXPR:
5349 /* A + (-B) -> A - B */
5350 if (TREE_CODE (arg1) == NEGATE_EXPR)
5351 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5352 /* (-A) + B -> B - A */
5353 if (TREE_CODE (arg0) == NEGATE_EXPR)
5354 return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
5355 else if (! FLOAT_TYPE_P (type))
5356 {
5357 if (integer_zerop (arg1))
5358 return non_lvalue (convert (type, arg0));
5359
5360 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
5361 with a constant, and the two constants have no bits in common,
5362 we should treat this as a BIT_IOR_EXPR since this may produce more
5363 simplifications. */
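	  /* For example, (x & 0xF0) + (y & 0x0F): the masks share no
	     bits, so no carries occur and the sum equals
	     (x & 0xF0) | (y & 0x0F).  */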
5364 if (TREE_CODE (arg0) == BIT_AND_EXPR
5365 && TREE_CODE (arg1) == BIT_AND_EXPR
5366 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5367 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5368 && integer_zerop (const_binop (BIT_AND_EXPR,
5369 TREE_OPERAND (arg0, 1),
5370 TREE_OPERAND (arg1, 1), 0)))
5371 {
5372 code = BIT_IOR_EXPR;
5373 goto bit_ior;
5374 }
5375
5376 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
5377 (plus (plus (mult) (mult)) (foo)) so that we can
5378 take advantage of the factoring cases below. */
5379 if ((TREE_CODE (arg0) == PLUS_EXPR
5380 && TREE_CODE (arg1) == MULT_EXPR)
5381 || (TREE_CODE (arg1) == PLUS_EXPR
5382 && TREE_CODE (arg0) == MULT_EXPR))
5383 {
5384 tree parg0, parg1, parg, marg;
5385
5386 if (TREE_CODE (arg0) == PLUS_EXPR)
5387 parg = arg0, marg = arg1;
5388 else
5389 parg = arg1, marg = arg0;
5390 parg0 = TREE_OPERAND (parg, 0);
5391 parg1 = TREE_OPERAND (parg, 1);
5392 STRIP_NOPS (parg0);
5393 STRIP_NOPS (parg1);
5394
5395 if (TREE_CODE (parg0) == MULT_EXPR
5396 && TREE_CODE (parg1) != MULT_EXPR)
5397 return fold (build (PLUS_EXPR, type,
5398 fold (build (PLUS_EXPR, type, parg0, marg)),
5399 parg1));
5400 if (TREE_CODE (parg0) != MULT_EXPR
5401 && TREE_CODE (parg1) == MULT_EXPR)
5402 return fold (build (PLUS_EXPR, type,
5403 fold (build (PLUS_EXPR, type, parg1, marg)),
5404 parg0));
5405 }
5406
5407 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
5408 {
5409 tree arg00, arg01, arg10, arg11;
5410 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
5411
5412 /* (A * C) + (B * C) -> (A+B) * C.
5413 We are most concerned about the case where C is a constant,
5414 but other combinations show up during loop reduction. Since
5415 it is not difficult, try all four possibilities. */
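	  /* For example, i*n + j*n becomes (i + j) * n, a pattern that
	     array indexing and loop reduction frequently produce.  */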
5416
5417 arg00 = TREE_OPERAND (arg0, 0);
5418 arg01 = TREE_OPERAND (arg0, 1);
5419 arg10 = TREE_OPERAND (arg1, 0);
5420 arg11 = TREE_OPERAND (arg1, 1);
5421 same = NULL_TREE;
5422
5423 if (operand_equal_p (arg01, arg11, 0))
5424 same = arg01, alt0 = arg00, alt1 = arg10;
5425 else if (operand_equal_p (arg00, arg10, 0))
5426 same = arg00, alt0 = arg01, alt1 = arg11;
5427 else if (operand_equal_p (arg00, arg11, 0))
5428 same = arg00, alt0 = arg01, alt1 = arg10;
5429 else if (operand_equal_p (arg01, arg10, 0))
5430 same = arg01, alt0 = arg00, alt1 = arg11;
5431
5432 /* No identical multiplicands; see if we can find a common
5433 power-of-two factor in non-power-of-two multiplies. This
5434 can help in multi-dimensional array access. */
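	  /* For example, i*12 + j*4: the common power-of-two factor 4
	     gives (i*3)*4 + j*4, which then folds to ((i*3) + j) * 4.  */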
5435 else if (TREE_CODE (arg01) == INTEGER_CST
5436 && TREE_CODE (arg11) == INTEGER_CST
5437 && TREE_INT_CST_HIGH (arg01) == 0
5438 && TREE_INT_CST_HIGH (arg11) == 0)
5439 {
5440 HOST_WIDE_INT int01, int11, tmp;
5441 int01 = TREE_INT_CST_LOW (arg01);
5442 int11 = TREE_INT_CST_LOW (arg11);
5443
5444 /* Move min of absolute values to int11. */
5445 if ((int01 >= 0 ? int01 : -int01)
5446 < (int11 >= 0 ? int11 : -int11))
5447 {
5448 tmp = int01, int01 = int11, int11 = tmp;
5449 alt0 = arg00, arg00 = arg10, arg10 = alt0;
5450 alt0 = arg01, arg01 = arg11, arg11 = alt0;
5451 }
5452
5453 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
5454 {
5455 alt0 = fold (build (MULT_EXPR, type, arg00,
5456 build_int_2 (int01 / int11, 0)));
5457 alt1 = arg10;
5458 same = arg11;
5459 }
5460 }
5461
5462 if (same)
5463 return fold (build (MULT_EXPR, type,
5464 fold (build (PLUS_EXPR, type, alt0, alt1)),
5465 same));
5466 }
5467 }
5468 /* In IEEE floating point, x+0 may not equal x. */
5469 else if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5470 || flag_unsafe_math_optimizations)
5471 && real_zerop (arg1))
5472 return non_lvalue (convert (type, arg0));
5473 /* x+(-0) equals x, even for IEEE. */
5474 else if (TREE_CODE (arg1) == REAL_CST
5475 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (arg1)))
5476 return non_lvalue (convert (type, arg0));
5477
5478 bit_rotate:
5479 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
5480 is a rotate of A by C1 bits. */
5481 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
5482 is a rotate of A by B bits. */
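      /* For example, with a 32-bit unsigned A, (A << 3) + (A >> 29)
	 has shift counts summing to the precision and so folds to a
	 left rotate of A by 3; (A << B) + (A >> (32 - B)) likewise
	 becomes a rotate by B.  */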
5483 {
5484 register enum tree_code code0, code1;
5485 code0 = TREE_CODE (arg0);
5486 code1 = TREE_CODE (arg1);
5487 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
5488 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
5489 && operand_equal_p (TREE_OPERAND (arg0, 0),
5490 TREE_OPERAND (arg1, 0), 0)
5491 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5492 {
5493 register tree tree01, tree11;
5494 register enum tree_code code01, code11;
5495
5496 tree01 = TREE_OPERAND (arg0, 1);
5497 tree11 = TREE_OPERAND (arg1, 1);
5498 STRIP_NOPS (tree01);
5499 STRIP_NOPS (tree11);
5500 code01 = TREE_CODE (tree01);
5501 code11 = TREE_CODE (tree11);
5502 if (code01 == INTEGER_CST
5503 && code11 == INTEGER_CST
5504 && TREE_INT_CST_HIGH (tree01) == 0
5505 && TREE_INT_CST_HIGH (tree11) == 0
5506 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
5507 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
5508 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
5509 code0 == LSHIFT_EXPR ? tree01 : tree11);
5510 else if (code11 == MINUS_EXPR)
5511 {
5512 tree tree110, tree111;
5513 tree110 = TREE_OPERAND (tree11, 0);
5514 tree111 = TREE_OPERAND (tree11, 1);
5515 STRIP_NOPS (tree110);
5516 STRIP_NOPS (tree111);
5517 if (TREE_CODE (tree110) == INTEGER_CST
5518 && 0 == compare_tree_int (tree110,
5519 TYPE_PRECISION
5520 (TREE_TYPE (TREE_OPERAND
5521 (arg0, 0))))
5522 && operand_equal_p (tree01, tree111, 0))
5523 return build ((code0 == LSHIFT_EXPR
5524 ? LROTATE_EXPR
5525 : RROTATE_EXPR),
5526 type, TREE_OPERAND (arg0, 0), tree01);
5527 }
5528 else if (code01 == MINUS_EXPR)
5529 {
5530 tree tree010, tree011;
5531 tree010 = TREE_OPERAND (tree01, 0);
5532 tree011 = TREE_OPERAND (tree01, 1);
5533 STRIP_NOPS (tree010);
5534 STRIP_NOPS (tree011);
5535 if (TREE_CODE (tree010) == INTEGER_CST
5536 && 0 == compare_tree_int (tree010,
5537 TYPE_PRECISION
5538 (TREE_TYPE (TREE_OPERAND
5539 (arg0, 0))))
5540 && operand_equal_p (tree11, tree011, 0))
5541 return build ((code0 != LSHIFT_EXPR
5542 ? LROTATE_EXPR
5543 : RROTATE_EXPR),
5544 type, TREE_OPERAND (arg0, 0), tree11);
5545 }
5546 }
5547 }
5548
5549 associate:
5550 /* In most languages, we can't associate operations on floats through
5551 parentheses. Rather than remember where the parentheses were, we
5552 don't associate floats at all. It shouldn't matter much. However,
5553 associating multiplications is only very slightly inaccurate, so do
5554 that if -funsafe-math-optimizations is specified. */
5555
5556 if (! wins
5557 && (! FLOAT_TYPE_P (type)
5558 || (flag_unsafe_math_optimizations && code == MULT_EXPR)))
5559 {
5560 tree var0, con0, lit0, var1, con1, lit1;
5561
5562 /* Split both trees into variables, constants, and literals. Then
5563 associate each group together, the constants with literals,
5564 then the result with variables. This increases the chances of
5565 literals being recombined later and of generating relocatable
5566 expressions for the sum of a constant and literal. */
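	  /* For example, (x + 3) + (y + 5): the variables associate to
	     x + y and the integer literals fold to 8, giving
	     (x + y) + 8.  */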
5567 var0 = split_tree (arg0, code, &con0, &lit0, 0);
5568 var1 = split_tree (arg1, code, &con1, &lit1, code == MINUS_EXPR);
5569
5570 /* Only do something if we found more than two objects. Otherwise,
5571 nothing has changed and we risk infinite recursion. */
5572 if (2 < ((var0 != 0) + (var1 != 0) + (con0 != 0) + (con1 != 0)
5573 + (lit0 != 0) + (lit1 != 0)))
5574 {
5575 var0 = associate_trees (var0, var1, code, type);
5576 con0 = associate_trees (con0, con1, code, type);
5577 lit0 = associate_trees (lit0, lit1, code, type);
5578 con0 = associate_trees (con0, lit0, code, type);
5579 return convert (type, associate_trees (var0, con0, code, type));
5580 }
5581 }
5582
5583 binary:
5584 #if defined (REAL_IS_NOT_DOUBLE) && ! defined (REAL_ARITHMETIC)
5585 if (TREE_CODE (arg1) == REAL_CST)
5586 return t;
5587 #endif /* REAL_IS_NOT_DOUBLE, and no REAL_ARITHMETIC */
5588 if (wins)
5589 t1 = const_binop (code, arg0, arg1, 0);
5590 if (t1 != NULL_TREE)
5591 {
5592 /* The return value should always have
5593 the same type as the original expression. */
5594 if (TREE_TYPE (t1) != TREE_TYPE (t))
5595 t1 = convert (TREE_TYPE (t), t1);
5596
5597 return t1;
5598 }
5599 return t;
5600
5601 case MINUS_EXPR:
5602 /* A - (-B) -> A + B */
5603 if (TREE_CODE (arg1) == NEGATE_EXPR)
5604 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5605 /* (-A) - CST -> (-CST) - A for floating point (what about ints ?) */
5606 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == REAL_CST)
5607 return
5608 fold (build (MINUS_EXPR, type,
5609 build_real (TREE_TYPE (arg1),
5610 REAL_VALUE_NEGATE (TREE_REAL_CST (arg1))),
5611 TREE_OPERAND (arg0, 0)));
5612
5613 if (! FLOAT_TYPE_P (type))
5614 {
5615 if (! wins && integer_zerop (arg0))
5616 return negate_expr (convert (type, arg1));
5617 if (integer_zerop (arg1))
5618 return non_lvalue (convert (type, arg0));
5619
5620 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
5621 about the case where C is a constant, just try one of the
5622 four possibilities. */
5623
5624 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
5625 && operand_equal_p (TREE_OPERAND (arg0, 1),
5626 TREE_OPERAND (arg1, 1), 0))
5627 return fold (build (MULT_EXPR, type,
5628 fold (build (MINUS_EXPR, type,
5629 TREE_OPERAND (arg0, 0),
5630 TREE_OPERAND (arg1, 0))),
5631 TREE_OPERAND (arg0, 1)));
5632 }
5633
5634 else if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5635 || flag_unsafe_math_optimizations)
5636 {
5637 /* Except with IEEE floating point, 0-x equals -x. */
5638 if (! wins && real_zerop (arg0))
5639 return negate_expr (convert (type, arg1));
5640 /* Except with IEEE floating point, x-0 equals x. */
5641 if (real_zerop (arg1))
5642 return non_lvalue (convert (type, arg0));
5643 }
5644
5645 /* Fold &x - &x. This can happen from &x.foo - &x.
5646 This is unsafe for certain floats even in non-IEEE formats.
5647 In IEEE, it is unsafe because it does wrong for NaNs.
5648 Also note that operand_equal_p is always false if an operand
5649 is volatile. */
5650
5651 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
5652 && operand_equal_p (arg0, arg1, 0))
5653 return convert (type, integer_zero_node);
5654
5655 goto associate;
5656
5657 case MULT_EXPR:
5658 /* (-A) * (-B) -> A * B */
5659 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
5660 return fold (build (MULT_EXPR, type, TREE_OPERAND (arg0, 0),
5661 TREE_OPERAND (arg1, 0)));
5662
5663 if (! FLOAT_TYPE_P (type))
5664 {
5665 if (integer_zerop (arg1))
5666 return omit_one_operand (type, arg1, arg0);
5667 if (integer_onep (arg1))
5668 return non_lvalue (convert (type, arg0));
5669
5670 /* (a * (1 << b)) is (a << b) */
5671 if (TREE_CODE (arg1) == LSHIFT_EXPR
5672 && integer_onep (TREE_OPERAND (arg1, 0)))
5673 return fold (build (LSHIFT_EXPR, type, arg0,
5674 TREE_OPERAND (arg1, 1)));
5675 if (TREE_CODE (arg0) == LSHIFT_EXPR
5676 && integer_onep (TREE_OPERAND (arg0, 0)))
5677 return fold (build (LSHIFT_EXPR, type, arg1,
5678 TREE_OPERAND (arg0, 1)));
5679
5680 if (TREE_CODE (arg1) == INTEGER_CST
5681 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
5682 code, NULL_TREE)))
5683 return convert (type, tem);
5684
5685 }
5686 else
5687 {
5688 /* x*0 is 0, except for IEEE floating point. */
5689 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5690 || flag_unsafe_math_optimizations)
5691 && real_zerop (arg1))
5692 return omit_one_operand (type, arg1, arg0);
5693 /* In IEEE floating point, x*1 is not equivalent to x for snans.
5694 However, ANSI says we can drop signals,
5695 so we can do this anyway. */
5696 if (real_onep (arg1))
5697 return non_lvalue (convert (type, arg0));
5698 /* x*2 is x+x */
5699 if (! wins && real_twop (arg1) && global_bindings_p () == 0
5700 && ! contains_placeholder_p (arg0))
5701 {
5702 tree arg = save_expr (arg0);
5703 return build (PLUS_EXPR, type, arg, arg);
5704 }
5705 }
5706 goto associate;
5707
5708 case BIT_IOR_EXPR:
5709 bit_ior:
5710 if (integer_all_onesp (arg1))
5711 return omit_one_operand (type, arg1, arg0);
5712 if (integer_zerop (arg1))
5713 return non_lvalue (convert (type, arg0));
5714 t1 = distribute_bit_expr (code, type, arg0, arg1);
5715 if (t1 != NULL_TREE)
5716 return t1;
5717
5718 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
5719
5720 This results in more efficient code for machines without a NAND
5721 instruction. Combine will canonicalize to the first form
5722 which will allow use of NAND instructions provided by the
5723 backend if they exist. */
5724 if (TREE_CODE (arg0) == BIT_NOT_EXPR
5725 && TREE_CODE (arg1) == BIT_NOT_EXPR)
5726 {
5727 return fold (build1 (BIT_NOT_EXPR, type,
5728 build (BIT_AND_EXPR, type,
5729 TREE_OPERAND (arg0, 0),
5730 TREE_OPERAND (arg1, 0))));
5731 }
5732
5733 /* See if this can be simplified into a rotate first. If that
5734 is unsuccessful continue in the association code. */
5735 goto bit_rotate;
5736
5737 case BIT_XOR_EXPR:
5738 if (integer_zerop (arg1))
5739 return non_lvalue (convert (type, arg0));
5740 if (integer_all_onesp (arg1))
5741 return fold (build1 (BIT_NOT_EXPR, type, arg0));
5742
5743 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
5744 with a constant, and the two constants have no bits in common,
5745 we should treat this as a BIT_IOR_EXPR since this may produce more
5746 simplifications. */
5747 if (TREE_CODE (arg0) == BIT_AND_EXPR
5748 && TREE_CODE (arg1) == BIT_AND_EXPR
5749 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5750 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5751 && integer_zerop (const_binop (BIT_AND_EXPR,
5752 TREE_OPERAND (arg0, 1),
5753 TREE_OPERAND (arg1, 1), 0)))
5754 {
5755 code = BIT_IOR_EXPR;
5756 goto bit_ior;
5757 }
5758
5759 /* See if this can be simplified into a rotate first. If that
5760 is unsuccessful continue in the association code. */
5761 goto bit_rotate;
5762
5763 case BIT_AND_EXPR:
5764 bit_and:
5765 if (integer_all_onesp (arg1))
5766 return non_lvalue (convert (type, arg0));
5767 if (integer_zerop (arg1))
5768 return omit_one_operand (type, arg1, arg0);
5769 t1 = distribute_bit_expr (code, type, arg0, arg1);
5770 if (t1 != NULL_TREE)
5771 return t1;
5772 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
5773 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == NOP_EXPR
5774 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0))))
5775 {
5776 unsigned int prec
5777 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)));
5778
5779 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
5780 && (~TREE_INT_CST_LOW (arg0)
5781 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
5782 return build1 (NOP_EXPR, type, TREE_OPERAND (arg1, 0));
5783 }
5784 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
5785 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5786 {
5787 unsigned int prec
5788 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
5789
5790 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
5791 && (~TREE_INT_CST_LOW (arg1)
5792 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
5793 return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
5794 }
5795
5796 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
5797
5798 This results in more efficient code for machines without a NOR
5799 instruction. Combine will canonicalize to the first form
5800 which will allow use of NOR instructions provided by the
5801 backend if they exist. */
5802 if (TREE_CODE (arg0) == BIT_NOT_EXPR
5803 && TREE_CODE (arg1) == BIT_NOT_EXPR)
5804 {
5805 return fold (build1 (BIT_NOT_EXPR, type,
5806 build (BIT_IOR_EXPR, type,
5807 TREE_OPERAND (arg0, 0),
5808 TREE_OPERAND (arg1, 0))));
5809 }
5810
5811 goto associate;
5812
5813 case BIT_ANDTC_EXPR:
5814 if (integer_all_onesp (arg0))
5815 return non_lvalue (convert (type, arg1));
5816 if (integer_zerop (arg0))
5817 return omit_one_operand (type, arg0, arg1);
5818 if (TREE_CODE (arg1) == INTEGER_CST)
5819 {
5820 arg1 = fold (build1 (BIT_NOT_EXPR, type, arg1));
5821 code = BIT_AND_EXPR;
5822 goto bit_and;
5823 }
5824 goto binary;
5825
5826 case RDIV_EXPR:
5827 /* In most cases, do nothing with a divide by zero. */
5828 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
5829 #ifndef REAL_INFINITY
5830 if (TREE_CODE (arg1) == REAL_CST && real_zerop (arg1))
5831 return t;
5832 #endif
5833 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
5834
5835 /* (-A) / (-B) -> A / B */
5836 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
5837 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
5838 TREE_OPERAND (arg1, 0)));
5839
5840 /* In IEEE floating point, x/1 is not equivalent to x for snans.
5841 However, ANSI says we can drop signals, so we can do this anyway. */
5842 if (real_onep (arg1))
5843 return non_lvalue (convert (type, arg0));
5844
5845 /* If ARG1 is a constant, we can convert this to a multiply by the
5846 reciprocal. This does not have the same rounding properties,
5847 so only do this if -funsafe-math-optimizations. We can actually
5848 always safely do it if ARG1 is a power of two, but it's hard to
5849 tell if it is or not in a portable manner. */
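      /* For example, x / 2.0 becomes x * 0.5 whenever optimizing, since
	 the reciprocal of 2.0 is exact, while x / 3.0 becomes
	 x * (1.0/3.0) only under -funsafe-math-optimizations, because
	 that reciprocal rounds.  */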
5850 if (TREE_CODE (arg1) == REAL_CST)
5851 {
5852 if (flag_unsafe_math_optimizations
5853 && 0 != (tem = const_binop (code, build_real (type, dconst1),
5854 arg1, 0)))
5855 return fold (build (MULT_EXPR, type, arg0, tem));
5856 /* Find the reciprocal if optimizing and the result is exact. */
5857 else if (optimize)
5858 {
5859 REAL_VALUE_TYPE r;
5860 r = TREE_REAL_CST (arg1);
5861 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
5862 {
5863 tem = build_real (type, r);
5864 return fold (build (MULT_EXPR, type, arg0, tem));
5865 }
5866 }
5867 }
5868 goto binary;
5869
5870 case TRUNC_DIV_EXPR:
5871 case ROUND_DIV_EXPR:
5872 case FLOOR_DIV_EXPR:
5873 case CEIL_DIV_EXPR:
5874 case EXACT_DIV_EXPR:
5875 if (integer_onep (arg1))
5876 return non_lvalue (convert (type, arg0));
5877 if (integer_zerop (arg1))
5878 return t;
5879
5880 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
5881 operation, EXACT_DIV_EXPR.
5882
5883 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
5884 At one time others generated faster code, it's not clear if they do
5885 after the last round of changes to the DIV code in expmed.c. */
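      /* For example, a FLOOR_DIV_EXPR of i*8 by 4 is provably exact,
	 so it is rewritten as an EXACT_DIV_EXPR.  */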
5886 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
5887 && multiple_of_p (type, arg0, arg1))
5888 return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));
5889
5890 if (TREE_CODE (arg1) == INTEGER_CST
5891 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
5892 code, NULL_TREE)))
5893 return convert (type, tem);
5894
5895 goto binary;
5896
5897 case CEIL_MOD_EXPR:
5898 case FLOOR_MOD_EXPR:
5899 case ROUND_MOD_EXPR:
5900 case TRUNC_MOD_EXPR:
5901 if (integer_onep (arg1))
5902 return omit_one_operand (type, integer_zero_node, arg0);
5903 if (integer_zerop (arg1))
5904 return t;
5905
5906 if (TREE_CODE (arg1) == INTEGER_CST
5907 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
5908 code, NULL_TREE)))
5909 return convert (type, tem);
5910
5911 goto binary;
5912
5913 case LSHIFT_EXPR:
5914 case RSHIFT_EXPR:
5915 case LROTATE_EXPR:
5916 case RROTATE_EXPR:
5917 if (integer_zerop (arg1))
5918 return non_lvalue (convert (type, arg0));
5919 /* Since negative shift count is not well-defined,
5920 don't try to compute it in the compiler. */
5921 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
5922 return t;
5923 /* Rewrite an LROTATE_EXPR by a constant into an
5924 RROTATE_EXPR by a new constant. */
5925 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
5926 {
5927 TREE_SET_CODE (t, RROTATE_EXPR);
5928 code = RROTATE_EXPR;
5929 TREE_OPERAND (t, 1) = arg1
5930 = const_binop
5931 (MINUS_EXPR,
5932 convert (TREE_TYPE (arg1),
5933 build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0)),
5934 arg1, 0);
5935 if (tree_int_cst_sgn (arg1) < 0)
5936 return t;
5937 }
5938
5939 /* If we have a rotate of a bit operation with the rotate count and
5940 the second operand of the bit operation both constant,
5941 permute the two operations. */
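      /* For example, with 32-bit operands, (x & 0xFF00) rrotate 8
	 becomes (x rrotate 8) & 0xFF, the constant having been
	 rotated at compile time.  */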
5942 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
5943 && (TREE_CODE (arg0) == BIT_AND_EXPR
5944 || TREE_CODE (arg0) == BIT_ANDTC_EXPR
5945 || TREE_CODE (arg0) == BIT_IOR_EXPR
5946 || TREE_CODE (arg0) == BIT_XOR_EXPR)
5947 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
5948 return fold (build (TREE_CODE (arg0), type,
5949 fold (build (code, type,
5950 TREE_OPERAND (arg0, 0), arg1)),
5951 fold (build (code, type,
5952 TREE_OPERAND (arg0, 1), arg1))));
5953
5954 /* Two consecutive rotates adding up to the width of the mode can
5955 be ignored. */
5956 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
5957 && TREE_CODE (arg0) == RROTATE_EXPR
5958 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5959 && TREE_INT_CST_HIGH (arg1) == 0
5960 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
5961 && ((TREE_INT_CST_LOW (arg1)
5962 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
5963 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
5964 return TREE_OPERAND (arg0, 0);
5965
5966 goto binary;
5967
5968 case MIN_EXPR:
5969 if (operand_equal_p (arg0, arg1, 0))
5970 return omit_one_operand (type, arg0, arg1);
5971 if (INTEGRAL_TYPE_P (type)
5972 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
5973 return omit_one_operand (type, arg1, arg0);
5974 goto associate;
5975
5976 case MAX_EXPR:
5977 if (operand_equal_p (arg0, arg1, 0))
5978 return omit_one_operand (type, arg0, arg1);
5979 if (INTEGRAL_TYPE_P (type)
5980 && TYPE_MAX_VALUE (type)
5981 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
5982 return omit_one_operand (type, arg1, arg0);
5983 goto associate;
5984
5985 case TRUTH_NOT_EXPR:
5986 /* Note that the operand of this must be an int
5987 and its values must be 0 or 1.
5988 ("true" is a fixed value perhaps depending on the language,
5989 but we don't handle values other than 1 correctly yet.) */
5990 tem = invert_truthvalue (arg0);
5991 /* Avoid infinite recursion. */
5992 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
5993 return t;
5994 return convert (type, tem);
5995
5996 case TRUTH_ANDIF_EXPR:
5997 /* Note that the operands of this must be ints
5998 and their values must be 0 or 1.
5999 ("true" is a fixed value perhaps depending on the language.) */
6000 /* If first arg is constant zero, return it. */
6001 if (integer_zerop (arg0))
6002 return convert (type, arg0);
6003 case TRUTH_AND_EXPR:
6004 /* If either arg is constant true, drop it. */
6005 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6006 return non_lvalue (convert (type, arg1));
6007 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
6008 /* Preserve sequence points. */
6009 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
6010 return non_lvalue (convert (type, arg0));
6011 /* If second arg is constant zero, result is zero, but first arg
6012 must be evaluated. */
6013 if (integer_zerop (arg1))
6014 return omit_one_operand (type, arg1, arg0);
6015 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
6016 case will be handled here. */
6017 if (integer_zerop (arg0))
6018 return omit_one_operand (type, arg0, arg1);
6019
6020 truth_andor:
6021 /* We only do these simplifications if we are optimizing. */
6022 if (!optimize)
6023 return t;
6024
6025 /* Check for things like (A || B) && (A || C). We can convert this
6026 to A || (B && C). Note that either operator can be any of the four
6027 truth and/or operations and the transformation will still be
6028 valid. Also note that we only care about order for the
6029 ANDIF and ORIF operators. If B contains side effects, this
6030 might change the truth-value of A. */
6031 if (TREE_CODE (arg0) == TREE_CODE (arg1)
6032 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
6033 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
6034 || TREE_CODE (arg0) == TRUTH_AND_EXPR
6035 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
6036 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
6037 {
6038 tree a00 = TREE_OPERAND (arg0, 0);
6039 tree a01 = TREE_OPERAND (arg0, 1);
6040 tree a10 = TREE_OPERAND (arg1, 0);
6041 tree a11 = TREE_OPERAND (arg1, 1);
6042 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
6043 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
6044 && (code == TRUTH_AND_EXPR
6045 || code == TRUTH_OR_EXPR));
6046
6047 if (operand_equal_p (a00, a10, 0))
6048 return fold (build (TREE_CODE (arg0), type, a00,
6049 fold (build (code, type, a01, a11))));
6050 else if (commutative && operand_equal_p (a00, a11, 0))
6051 return fold (build (TREE_CODE (arg0), type, a00,
6052 fold (build (code, type, a01, a10))));
6053 else if (commutative && operand_equal_p (a01, a10, 0))
6054 return fold (build (TREE_CODE (arg0), type, a01,
6055 fold (build (code, type, a00, a11))));
6056
6057 /* This case is tricky because we must either have commutative
6058 operators or else A10 must not have side-effects. */
6059
6060 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
6061 && operand_equal_p (a01, a11, 0))
6062 return fold (build (TREE_CODE (arg0), type,
6063 fold (build (code, type, a00, a10)),
6064 a01));
6065 }
6066
6067 /* See if we can build a range comparison. */
6068 if (0 != (tem = fold_range_test (t)))
6069 return tem;
6070
6071 /* Check for the possibility of merging component references. If our
6072 lhs is another similar operation, try to merge its rhs with our
6073 rhs. Then try to merge our lhs and rhs. */
6074 if (TREE_CODE (arg0) == code
6075 && 0 != (tem = fold_truthop (code, type,
6076 TREE_OPERAND (arg0, 1), arg1)))
6077 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6078
6079 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
6080 return tem;
6081
6082 return t;
6083
6084 case TRUTH_ORIF_EXPR:
6085 /* Note that the operands of this must be ints
6086 and their values must be 0 or true.
6087 ("true" is a fixed value perhaps depending on the language.) */
6088 /* If first arg is constant true, return it. */
6089 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6090 return convert (type, arg0);
6091 case TRUTH_OR_EXPR:
6092 /* If either arg is constant zero, drop it. */
6093 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
6094 return non_lvalue (convert (type, arg1));
6095 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
6096 /* Preserve sequence points. */
6097 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
6098 return non_lvalue (convert (type, arg0));
6099 /* If second arg is constant true, result is true, but we must
6100 evaluate first arg. */
6101 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
6102 return omit_one_operand (type, arg1, arg0);
6103 /* Likewise for first arg, but note this only occurs here for
6104 TRUTH_OR_EXPR. */
6105 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6106 return omit_one_operand (type, arg0, arg1);
6107 goto truth_andor;
6108
6109 case TRUTH_XOR_EXPR:
6110 /* If either arg is constant zero, drop it. */
6111 if (integer_zerop (arg0))
6112 return non_lvalue (convert (type, arg1));
6113 if (integer_zerop (arg1))
6114 return non_lvalue (convert (type, arg0));
6115 /* If either arg is constant true, this is a logical inversion. */
6116 if (integer_onep (arg0))
6117 return non_lvalue (convert (type, invert_truthvalue (arg1)));
6118 if (integer_onep (arg1))
6119 return non_lvalue (convert (type, invert_truthvalue (arg0)));
6120 return t;
6121
6122 case EQ_EXPR:
6123 case NE_EXPR:
6124 case LT_EXPR:
6125 case GT_EXPR:
6126 case LE_EXPR:
6127 case GE_EXPR:
6128 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
6129 {
6130 /* (-a) CMP (-b) -> b CMP a */
6131 if (TREE_CODE (arg0) == NEGATE_EXPR
6132 && TREE_CODE (arg1) == NEGATE_EXPR)
6133 return fold (build (code, type, TREE_OPERAND (arg1, 0),
6134 TREE_OPERAND (arg0, 0)));
6135 /* (-a) CMP CST -> a swap(CMP) (-CST) */
6136 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == REAL_CST)
6137 return
6138 fold (build
6139 (swap_tree_comparison (code), type,
6140 TREE_OPERAND (arg0, 0),
6141 build_real (TREE_TYPE (arg1),
6142 REAL_VALUE_NEGATE (TREE_REAL_CST (arg1)))));
6143 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
6144 /* a CMP (-0) -> a CMP 0 */
6145 if (TREE_CODE (arg1) == REAL_CST
6146 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (arg1)))
6147 return fold (build (code, type, arg0,
6148 build_real (TREE_TYPE (arg1), dconst0)));
6149 }
6150
6151 /* If one arg is a constant integer, put it last. */
6152 if (TREE_CODE (arg0) == INTEGER_CST
6153 && TREE_CODE (arg1) != INTEGER_CST)
6154 {
6155 TREE_OPERAND (t, 0) = arg1;
6156 TREE_OPERAND (t, 1) = arg0;
6157 arg0 = TREE_OPERAND (t, 0);
6158 arg1 = TREE_OPERAND (t, 1);
6159 code = swap_tree_comparison (code);
6160 TREE_SET_CODE (t, code);
6161 }
6162
6163 /* Convert foo++ == CONST into ++foo == CONST + INCR.
6164 First, see if one arg is constant; find the constant arg
6165 and the other one. */
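      /* For example, assuming an increment of 1, foo++ == 5 becomes
	 ++foo == 6; this is done only for equality tests and for
	 pointers, where overflow in CONST + INCR cannot change the
	 outcome.  */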
6166 {
6167 tree constop = 0, varop = NULL_TREE;
6168 int constopnum = -1;
6169
6170 if (TREE_CONSTANT (arg1))
6171 constopnum = 1, constop = arg1, varop = arg0;
6172 if (TREE_CONSTANT (arg0))
6173 constopnum = 0, constop = arg0, varop = arg1;
6174
6175 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
6176 {
6177 /* This optimization is invalid for ordered comparisons
6178 if CONST+INCR overflows or if foo+incr might overflow.
6179 This optimization is invalid for floating point due to rounding.
6180 For pointer types we assume overflow doesn't happen. */
6181 if (POINTER_TYPE_P (TREE_TYPE (varop))
6182 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
6183 && (code == EQ_EXPR || code == NE_EXPR)))
6184 {
6185 tree newconst
6186 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
6187 constop, TREE_OPERAND (varop, 1)));
6188
6189 /* Do not overwrite the current varop to be a preincrement;
6190 create a new node so that we won't confuse our caller who
6191 might create trees and throw them away, reusing the
6192 arguments that they passed to build. This shows up in
6193 the THEN or ELSE parts of ?: being postincrements. */
6194 varop = build (PREINCREMENT_EXPR, TREE_TYPE (varop),
6195 TREE_OPERAND (varop, 0),
6196 TREE_OPERAND (varop, 1));
6197
6198 /* If VAROP is a reference to a bitfield, we must mask
6199 the constant by the width of the field. */
6200 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
6201 && DECL_BIT_FIELD(TREE_OPERAND
6202 (TREE_OPERAND (varop, 0), 1)))
6203 {
6204 int size
6205 = TREE_INT_CST_LOW (DECL_SIZE
6206 (TREE_OPERAND
6207 (TREE_OPERAND (varop, 0), 1)));
6208 tree mask, unsigned_type;
6209 unsigned int precision;
6210 tree folded_compare;
6211
6212 /* First check whether the comparison would always come
6213 out the same; otherwise the masking below would
6214 change its meaning. */
6215 if (constopnum == 0)
6216 folded_compare = fold (build (code, type, constop,
6217 TREE_OPERAND (varop, 0)));
6218 else
6219 folded_compare = fold (build (code, type,
6220 TREE_OPERAND (varop, 0),
6221 constop));
6222 if (integer_zerop (folded_compare)
6223 || integer_onep (folded_compare))
6224 return omit_one_operand (type, folded_compare, varop);
6225
6226 unsigned_type = type_for_size (size, 1);
6227 precision = TYPE_PRECISION (unsigned_type);
6228 mask = build_int_2 (~0, ~0);
6229 TREE_TYPE (mask) = unsigned_type;
6230 force_fit_type (mask, 0);
6231 mask = const_binop (RSHIFT_EXPR, mask,
6232 size_int (precision - size), 0);
6233 newconst = fold (build (BIT_AND_EXPR,
6234 TREE_TYPE (varop), newconst,
6235 convert (TREE_TYPE (varop),
6236 mask)));
6237 }
6238
6239 t = build (code, type,
6240 (constopnum == 0) ? newconst : varop,
6241 (constopnum == 1) ? newconst : varop);
6242 return t;
6243 }
6244 }
6245 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
6246 {
6247 if (POINTER_TYPE_P (TREE_TYPE (varop))
6248 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
6249 && (code == EQ_EXPR || code == NE_EXPR)))
6250 {
6251 tree newconst
6252 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
6253 constop, TREE_OPERAND (varop, 1)));
6254
6255 /* Do not overwrite the current varop to be a predecrement;
6256 create a new node so that we won't confuse our caller who
6257 might create trees and throw them away, reusing the
6258 arguments that they passed to build. This shows up in
6259 the THEN or ELSE parts of ?: being postdecrements. */
6260 varop = build (PREDECREMENT_EXPR, TREE_TYPE (varop),
6261 TREE_OPERAND (varop, 0),
6262 TREE_OPERAND (varop, 1));
6263
6264 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
6265 && DECL_BIT_FIELD(TREE_OPERAND
6266 (TREE_OPERAND (varop, 0), 1)))
6267 {
6268 int size
6269 = TREE_INT_CST_LOW (DECL_SIZE
6270 (TREE_OPERAND
6271 (TREE_OPERAND (varop, 0), 1)));
6272 tree mask, unsigned_type;
6273 unsigned int precision;
6274 tree folded_compare;
6275
6276 if (constopnum == 0)
6277 folded_compare = fold (build (code, type, constop,
6278 TREE_OPERAND (varop, 0)));
6279 else
6280 folded_compare = fold (build (code, type,
6281 TREE_OPERAND (varop, 0),
6282 constop));
6283 if (integer_zerop (folded_compare)
6284 || integer_onep (folded_compare))
6285 return omit_one_operand (type, folded_compare, varop);
6286
6287 unsigned_type = type_for_size (size, 1);
6288 precision = TYPE_PRECISION (unsigned_type);
6289 mask = build_int_2 (~0, ~0);
6290 TREE_TYPE (mask) = unsigned_type;
6291 force_fit_type (mask, 0);
6292 mask = const_binop (RSHIFT_EXPR, mask,
6293 size_int (precision - size), 0);
6294 newconst = fold (build (BIT_AND_EXPR,
6295 TREE_TYPE (varop), newconst,
6296 convert (TREE_TYPE (varop),
6297 mask)));
6298 }
6299
6300 t = build (code, type,
6301 (constopnum == 0) ? newconst : varop,
6302 (constopnum == 1) ? newconst : varop);
6303 return t;
6304 }
6305 }
6306 }
6307
6308 /* Change X >= CST to X > (CST - 1), and X < CST to X <= (CST - 1), if CST is positive. */
6309 if (TREE_CODE (arg1) == INTEGER_CST
6310 && TREE_CODE (arg0) != INTEGER_CST
6311 && tree_int_cst_sgn (arg1) > 0)
6312 {
6313 switch (TREE_CODE (t))
6314 {
6315 case GE_EXPR:
6316 code = GT_EXPR;
6317 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6318 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6319 break;
6320
6321 case LT_EXPR:
6322 code = LE_EXPR;
6323 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6324 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6325 break;
6326
6327 default:
6328 break;
6329 }
6330 }
6331
6332 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
6333 a MINUS_EXPR of a constant, we can convert it into a comparison with
6334 a revised constant as long as no overflow occurs. */
6335 if ((code == EQ_EXPR || code == NE_EXPR)
6336 && TREE_CODE (arg1) == INTEGER_CST
6337 && (TREE_CODE (arg0) == PLUS_EXPR
6338 || TREE_CODE (arg0) == MINUS_EXPR)
6339 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6340 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
6341 ? MINUS_EXPR : PLUS_EXPR,
6342 arg1, TREE_OPERAND (arg0, 1), 0))
6343 && ! TREE_CONSTANT_OVERFLOW (tem))
6344 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6345
6346 /* Similarly for a NEGATE_EXPR. */
6347 else if ((code == EQ_EXPR || code == NE_EXPR)
6348 && TREE_CODE (arg0) == NEGATE_EXPR
6349 && TREE_CODE (arg1) == INTEGER_CST
6350 && 0 != (tem = negate_expr (arg1))
6351 && TREE_CODE (tem) == INTEGER_CST
6352 && ! TREE_CONSTANT_OVERFLOW (tem))
6353 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6354
6355 /* If we have X - Y == 0, we can convert that to X == Y and similarly
6356 for !=. Don't do this for ordered comparisons due to overflow. */
6357 else if ((code == NE_EXPR || code == EQ_EXPR)
6358 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
6359 return fold (build (code, type,
6360 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
6361
6362 /* If we are widening one operand of an integer comparison,
6363 see if the other operand is similarly being widened. Perhaps we
6364 can do the comparison in the narrower type. */
6365 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
6366 && TREE_CODE (arg0) == NOP_EXPR
6367 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
6368 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
6369 && (TREE_TYPE (t1) == TREE_TYPE (tem)
6370 || (TREE_CODE (t1) == INTEGER_CST
6371 && int_fits_type_p (t1, TREE_TYPE (tem)))))
6372 return fold (build (code, type, tem, convert (TREE_TYPE (tem), t1)));
6373
6374 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
6375 constant, we can simplify it. */
6376 else if (TREE_CODE (arg1) == INTEGER_CST
6377 && (TREE_CODE (arg0) == MIN_EXPR
6378 || TREE_CODE (arg0) == MAX_EXPR)
6379 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
6380 return optimize_minmax_comparison (t);
6381
6382 /* If we are comparing an ABS_EXPR with a constant, we can
6383 convert all the cases into explicit comparisons, but they may
6384 well not be faster than doing the ABS and one comparison.
6385 But ABS (X) <= C is a range comparison, which becomes a subtraction
6386 and a comparison, and is probably faster. */
6387 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6388 && TREE_CODE (arg0) == ABS_EXPR
6389 && ! TREE_SIDE_EFFECTS (arg0)
6390 && (0 != (tem = negate_expr (arg1)))
6391 && TREE_CODE (tem) == INTEGER_CST
6392 && ! TREE_CONSTANT_OVERFLOW (tem))
6393 return fold (build (TRUTH_ANDIF_EXPR, type,
6394 build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
6395 build (LE_EXPR, type,
6396 TREE_OPERAND (arg0, 0), arg1)));
6397
6398 /* If this is an EQ or NE comparison with zero and ARG0 is
6399 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
6400 two operations, but the latter can be done in one less insn
6401 on machines that have only two-operand insns or on which a
6402 constant cannot be the first operand. */
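      /* For example, ((1 << n) & flags) != 0 becomes
	 ((flags >> n) & 1) != 0.  */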
6403 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
6404 && TREE_CODE (arg0) == BIT_AND_EXPR)
6405 {
6406 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
6407 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
6408 return
6409 fold (build (code, type,
6410 build (BIT_AND_EXPR, TREE_TYPE (arg0),
6411 build (RSHIFT_EXPR,
6412 TREE_TYPE (TREE_OPERAND (arg0, 0)),
6413 TREE_OPERAND (arg0, 1),
6414 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
6415 convert (TREE_TYPE (arg0),
6416 integer_one_node)),
6417 arg1));
6418 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
6419 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
6420 return
6421 fold (build (code, type,
6422 build (BIT_AND_EXPR, TREE_TYPE (arg0),
6423 build (RSHIFT_EXPR,
6424 TREE_TYPE (TREE_OPERAND (arg0, 1)),
6425 TREE_OPERAND (arg0, 0),
6426 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
6427 convert (TREE_TYPE (arg0),
6428 integer_one_node)),
6429 arg1));
6430 }
6431
6432 /* If this is an NE or EQ comparison of zero against the result of a
6433 signed MOD operation whose second operand is a power of 2, make
6434 the MOD operation unsigned since it is simpler and equivalent. */
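      /* For example, for signed x, (x % 4) == 0 becomes
	 ((unsigned) x % 4U) == 0: the low bits decide the result
	 either way.  */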
6435 if ((code == NE_EXPR || code == EQ_EXPR)
6436 && integer_zerop (arg1)
6437 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
6438 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
6439 || TREE_CODE (arg0) == CEIL_MOD_EXPR
6440 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
6441 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
6442 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6443 {
6444 tree newtype = unsigned_type (TREE_TYPE (arg0));
6445 tree newmod = build (TREE_CODE (arg0), newtype,
6446 convert (newtype, TREE_OPERAND (arg0, 0)),
6447 convert (newtype, TREE_OPERAND (arg0, 1)));
6448
6449 return build (code, type, newmod, convert (newtype, arg1));
6450 }
6451
6452 /* If this is an NE comparison of zero with an AND of one, remove the
6453 comparison since the AND will give the correct value. */
6454 if (code == NE_EXPR && integer_zerop (arg1)
6455 && TREE_CODE (arg0) == BIT_AND_EXPR
6456 && integer_onep (TREE_OPERAND (arg0, 1)))
6457 return convert (type, arg0);
6458
6459 /* If we have (A & C) == C where C is a power of 2, convert this into
6460 (A & C) != 0. Similarly for NE_EXPR. */
6461 if ((code == EQ_EXPR || code == NE_EXPR)
6462 && TREE_CODE (arg0) == BIT_AND_EXPR
6463 && integer_pow2p (TREE_OPERAND (arg0, 1))
6464 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
6465 return build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
6466 arg0, integer_zero_node);
6467
6468 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
6469 and similarly for >= into !=. */
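      /* For example, for unsigned x, x < (1 << y) becomes
	 (x >> y) == 0, and x >= (1 << y) becomes (x >> y) != 0.  */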
6470 if ((code == LT_EXPR || code == GE_EXPR)
6471 && TREE_UNSIGNED (TREE_TYPE (arg0))
6472 && TREE_CODE (arg1) == LSHIFT_EXPR
6473 && integer_onep (TREE_OPERAND (arg1, 0)))
6474 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
6475 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
6476 TREE_OPERAND (arg1, 1)),
6477 convert (TREE_TYPE (arg0), integer_zero_node));
6478
6479 else if ((code == LT_EXPR || code == GE_EXPR)
6480 && TREE_UNSIGNED (TREE_TYPE (arg0))
6481 && (TREE_CODE (arg1) == NOP_EXPR
6482 || TREE_CODE (arg1) == CONVERT_EXPR)
6483 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
6484 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
6485 return
6486 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
6487 convert (TREE_TYPE (arg0),
6488 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
6489 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
6490 convert (TREE_TYPE (arg0), integer_zero_node));
6491
6492 /* Simplify comparison of something with itself. (For IEEE
6493 floating-point, we can only do some of these simplifications.) */
6494 if (operand_equal_p (arg0, arg1, 0))
6495 {
6496 switch (code)
6497 {
6498 case EQ_EXPR:
6499 case GE_EXPR:
6500 case LE_EXPR:
6501 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6502 return constant_boolean_node (1, type);
6503 code = EQ_EXPR;
6504 TREE_SET_CODE (t, code);
6505 break;
6506
6507 case NE_EXPR:
6508 /* For NE, we can only do this simplification if integer. */
6509 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6510 break;
6511 /* ... fall through ... */
6512 case GT_EXPR:
6513 case LT_EXPR:
6514 return constant_boolean_node (0, type);
6515 default:
6516 abort ();
6517 }
6518 }
6519
6520 /* An unsigned comparison against 0 can be simplified. */
6521 if (integer_zerop (arg1)
6522 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
6523 || POINTER_TYPE_P (TREE_TYPE (arg1)))
6524 && TREE_UNSIGNED (TREE_TYPE (arg1)))
6525 {
6526 switch (TREE_CODE (t))
6527 {
6528 case GT_EXPR:
6529 code = NE_EXPR;
6530 TREE_SET_CODE (t, NE_EXPR);
6531 break;
6532 case LE_EXPR:
6533 code = EQ_EXPR;
6534 TREE_SET_CODE (t, EQ_EXPR);
6535 break;
6536 case GE_EXPR:
6537 return omit_one_operand (type,
6538 convert (type, integer_one_node),
6539 arg0);
6540 case LT_EXPR:
6541 return omit_one_operand (type,
6542 convert (type, integer_zero_node),
6543 arg0);
6544 default:
6545 break;
6546 }
6547 }
6548
6549 /* Comparisons with the highest or lowest possible integer of
6550 the specified size will have known values and an unsigned
6551 <= 0x7fffffff can be simplified. */
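      /* For example, with 32-bit types: signed x > 0x7fffffff is always
	 false, signed x >= 0x7fffffff becomes x == 0x7fffffff, and
	 unsigned x <= 0x7fffffffU becomes (int) x >= 0.  */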
6552 {
6553 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
6554
6555 if (TREE_CODE (arg1) == INTEGER_CST
6556 && ! TREE_CONSTANT_OVERFLOW (arg1)
6557 && width <= HOST_BITS_PER_WIDE_INT
6558 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
6559 || POINTER_TYPE_P (TREE_TYPE (arg1))))
6560 {
6561 if (TREE_INT_CST_HIGH (arg1) == 0
6562 && (TREE_INT_CST_LOW (arg1)
6563 == ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1)
6564 && ! TREE_UNSIGNED (TREE_TYPE (arg1)))
6565 switch (TREE_CODE (t))
6566 {
6567 case GT_EXPR:
6568 return omit_one_operand (type,
6569 convert (type, integer_zero_node),
6570 arg0);
6571 case GE_EXPR:
6572 TREE_SET_CODE (t, EQ_EXPR);
6573 break;
6574
6575 case LE_EXPR:
6576 return omit_one_operand (type,
6577 convert (type, integer_one_node),
6578 arg0);
6579 case LT_EXPR:
6580 TREE_SET_CODE (t, NE_EXPR);
6581 break;
6582
6583 default:
6584 break;
6585 }
6586
6587 else if (TREE_INT_CST_HIGH (arg1) == -1
6588 && (- TREE_INT_CST_LOW (arg1)
6589 == ((unsigned HOST_WIDE_INT) 1 << (width - 1)))
6590 && ! TREE_UNSIGNED (TREE_TYPE (arg1)))
6591 switch (TREE_CODE (t))
6592 {
6593 case LT_EXPR:
6594 return omit_one_operand (type,
6595 convert (type, integer_zero_node),
6596 arg0);
6597 case LE_EXPR:
6598 TREE_SET_CODE (t, EQ_EXPR);
6599 break;
6600
6601 case GE_EXPR:
6602 return omit_one_operand (type,
6603 convert (type, integer_one_node),
6604 arg0);
6605 case GT_EXPR:
6606 TREE_SET_CODE (t, NE_EXPR);
6607 break;
6608
6609 default:
6610 break;
6611 }
6612
6613 else if (TREE_INT_CST_HIGH (arg1) == 0
6614 && (TREE_INT_CST_LOW (arg1)
6615 == ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1)
6616 && TREE_UNSIGNED (TREE_TYPE (arg1))
6617 /* signed_type does not work on pointer types. */
6618 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
6619
6620 switch (TREE_CODE (t))
6621 {
6622 case LE_EXPR:
6623 return fold (build (GE_EXPR, type,
6624 convert (signed_type (TREE_TYPE (arg0)),
6625 arg0),
6626 convert (signed_type (TREE_TYPE (arg1)),
6627 integer_zero_node)));
6628 case GT_EXPR:
6629 return fold (build (LT_EXPR, type,
6630 convert (signed_type (TREE_TYPE (arg0)),
6631 arg0),
6632 convert (signed_type (TREE_TYPE (arg1)),
6633 integer_zero_node)));
6634
6635 default:
6636 break;
6637 }
6638 }
6639 }
6640
6641 /* If we are comparing an expression that just has comparisons
6642 of two integer values, arithmetic expressions of those comparisons,
6643 and constants, we can simplify it. There are only three cases
6644 to check: the two values can either be equal, the first can be
6645 greater, or the second can be greater. Fold the expression for
6646 those three values. Since each value must be 0 or 1, we have
6647 eight possibilities, each of which corresponds to the constant 0
6648 or 1 or one of the six possible comparisons.
6649
6650 This handles common cases like (a > b) == 0 but also handles
6651 expressions like ((x > y) - (y > x)) > 0, which supposedly
6652 occur in macroized code. */
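      /* For example, in ((x > y) - (y > x)) > 0 the three orderings
	 yield 1, 0 and 0, a mask of 4, so the whole expression folds
	 to x > y.  */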
6653
6654 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
6655 {
6656 tree cval1 = 0, cval2 = 0;
6657 int save_p = 0;
6658
6659 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
6660 /* Don't handle degenerate cases here; they should already
6661 have been handled anyway. */
6662 && cval1 != 0 && cval2 != 0
6663 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
6664 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
6665 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
6666 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
6667 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
6668 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
6669 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
6670 {
6671 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
6672 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
6673
6674 /* We can't just pass T to eval_subst in case cval1 or cval2
6675 was the same as ARG1. */
6676
6677 tree high_result
6678 = fold (build (code, type,
6679 eval_subst (arg0, cval1, maxval, cval2, minval),
6680 arg1));
6681 tree equal_result
6682 = fold (build (code, type,
6683 eval_subst (arg0, cval1, maxval, cval2, maxval),
6684 arg1));
6685 tree low_result
6686 = fold (build (code, type,
6687 eval_subst (arg0, cval1, minval, cval2, maxval),
6688 arg1));
6689
6690 /* All three of these results should be 0 or 1. Confirm they
6691 are. Then use those values to select the proper code
6692 to use. */
6693
6694 if ((integer_zerop (high_result)
6695 || integer_onep (high_result))
6696 && (integer_zerop (equal_result)
6697 || integer_onep (equal_result))
6698 && (integer_zerop (low_result)
6699 || integer_onep (low_result)))
6700 {
6701 /* Make a 3-bit mask with the high-order bit being the
6702 value for `>', the next for `=', and the low for `<'. */
6703 switch ((integer_onep (high_result) * 4)
6704 + (integer_onep (equal_result) * 2)
6705 + integer_onep (low_result))
6706 {
6707 case 0:
6708 /* Always false. */
6709 return omit_one_operand (type, integer_zero_node, arg0);
6710 case 1:
6711 code = LT_EXPR;
6712 break;
6713 case 2:
6714 code = EQ_EXPR;
6715 break;
6716 case 3:
6717 code = LE_EXPR;
6718 break;
6719 case 4:
6720 code = GT_EXPR;
6721 break;
6722 case 5:
6723 code = NE_EXPR;
6724 break;
6725 case 6:
6726 code = GE_EXPR;
6727 break;
6728 case 7:
6729 /* Always true. */
6730 return omit_one_operand (type, integer_one_node, arg0);
6731 }
6732
6733 t = build (code, type, cval1, cval2);
6734 if (save_p)
6735 return save_expr (t);
6736 else
6737 return fold (t);
6738 }
6739 }
6740 }
6741
6742 /* If this is a comparison of a field, we may be able to simplify it. */
6743 if ((TREE_CODE (arg0) == COMPONENT_REF
6744 || TREE_CODE (arg0) == BIT_FIELD_REF)
6745 && (code == EQ_EXPR || code == NE_EXPR)
6746 /* Handle the constant case even without -O
6747 to make sure the warnings are given. */
6748 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
6749 {
6750 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
6751 return t1 ? t1 : t;
6752 }
6753
6754 /* If this is a comparison of complex values and either or both sides
6755 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
6756 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
6757 This may prevent needless evaluations. */
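      /* For example, a == b on complex operands becomes
	 real(a) == real(b) && imag(a) == imag(b); != uses || instead.  */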
6758 if ((code == EQ_EXPR || code == NE_EXPR)
6759 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
6760 && (TREE_CODE (arg0) == COMPLEX_EXPR
6761 || TREE_CODE (arg1) == COMPLEX_EXPR
6762 || TREE_CODE (arg0) == COMPLEX_CST
6763 || TREE_CODE (arg1) == COMPLEX_CST))
6764 {
6765 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
6766 tree real0, imag0, real1, imag1;
6767
6768 arg0 = save_expr (arg0);
6769 arg1 = save_expr (arg1);
6770 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
6771 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
6772 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
6773 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
6774
6775 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
6776 : TRUTH_ORIF_EXPR),
6777 type,
6778 fold (build (code, type, real0, real1)),
6779 fold (build (code, type, imag0, imag1))));
6780 }
6781
6782 /* From here on, the only cases we handle are when the result is
6783 known to be a constant.
6784
6785 To compute GT, swap the arguments and do LT.
6786 To compute GE, do LT and invert the result.
6787 To compute LE, swap the arguments, do LT and invert the result.
6788 To compute NE, do EQ and invert the result.
6789
6790 Therefore, the code below must handle only EQ and LT. */
6791
6792 if (code == LE_EXPR || code == GT_EXPR)
6793 {
6794 tem = arg0, arg0 = arg1, arg1 = tem;
6795 code = swap_tree_comparison (code);
6796 }
6797
6798 /* Note that it is safe to invert for real values here because we
6799 will check below in the one case that it matters. */
6800
6801 t1 = NULL_TREE;
6802 invert = 0;
6803 if (code == NE_EXPR || code == GE_EXPR)
6804 {
6805 invert = 1;
6806 code = invert_tree_comparison (code);
6807 }
6808
6809 /* Compute a result for LT or EQ if args permit;
6810 otherwise return T. */
6811 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
6812 {
6813 if (code == EQ_EXPR)
6814 t1 = build_int_2 (tree_int_cst_equal (arg0, arg1), 0);
6815 else
6816 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
6817 ? INT_CST_LT_UNSIGNED (arg0, arg1)
6818 : INT_CST_LT (arg0, arg1)),
6819 0);
6820 }
6821
6822 #if 0 /* This is no longer useful, but breaks some real code. */
6823 /* Assume a nonexplicit constant cannot equal an explicit one,
6824 since such code would be undefined anyway.
6825 Exception: on sysvr4, using #pragma weak,
6826 a label can come out as 0. */
6827 else if (TREE_CODE (arg1) == INTEGER_CST
6828 && !integer_zerop (arg1)
6829 && TREE_CONSTANT (arg0)
6830 && TREE_CODE (arg0) == ADDR_EXPR
6831 && code == EQ_EXPR)
6832 t1 = build_int_2 (0, 0);
6833 #endif
6834 /* Two real constants can be compared explicitly. */
6835 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
6836 {
6837 /* If either operand is a NaN, the result is false with two
6838 exceptions: First, an NE_EXPR is true on NaNs, but that case
6839 is already handled correctly since we will be inverting the
6840 result for NE_EXPR. Second, if we had inverted a LE_EXPR
6841 or a GE_EXPR into a LT_EXPR, we must return true so that it
6842 will be inverted into false. */
6843
6844 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
6845 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
6846 t1 = build_int_2 (invert && code == LT_EXPR, 0);
6847
6848 else if (code == EQ_EXPR)
6849 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
6850 TREE_REAL_CST (arg1)),
6851 0);
6852 else
6853 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
6854 TREE_REAL_CST (arg1)),
6855 0);
6856 }
6857
6858 if (t1 == NULL_TREE)
6859 return t;
6860
6861 if (invert)
6862 TREE_INT_CST_LOW (t1) ^= 1;
6863
6864 TREE_TYPE (t1) = type;
6865 if (TREE_CODE (type) == BOOLEAN_TYPE)
6866 return truthvalue_conversion (t1);
6867 return t1;
6868
6869 case COND_EXPR:
6870 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
6871 so all simple results must be passed through pedantic_non_lvalue. */
6872 if (TREE_CODE (arg0) == INTEGER_CST)
6873 return pedantic_non_lvalue
6874 (TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
6875 else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
6876 return pedantic_omit_one_operand (type, arg1, arg0);

      /* If the second operand is zero, invert the comparison and swap
         the second and third operands.  Likewise if the second operand
         is constant and the third is not or if the third operand is
         equivalent to the first operand of the comparison.  */

      if (integer_zerop (arg1)
          || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
          || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
              && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
                                                 TREE_OPERAND (t, 2),
                                                 TREE_OPERAND (arg0, 1))))
        {
          /* See if this can be inverted.  If it can't, possibly because
             it was a floating-point inequality comparison, don't do
             anything.  */
          tem = invert_truthvalue (arg0);

          if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
            {
              t = build (code, type, tem,
                         TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
              arg0 = tem;
              /* arg1 should be the first argument of the new T.  */
              arg1 = TREE_OPERAND (t, 1);
              STRIP_NOPS (arg1);
            }
        }
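      /* E.g. "a == 0 ? 0 : b" becomes "a != 0 ? b : 0" here, which puts
         the interesting value in the second arm, where the
         simplifications below look for it.  */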

      /* If we have A op B ? A : C, we may be able to convert this to a
         simpler expression, depending on the operation and the values
         of B and C.  IEEE floating point prevents this though,
         because A or B might be -0.0 or a NaN.  */

      if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
          && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
              || ! FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0)))
              || flag_unsafe_math_optimizations)
          && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
                                             arg1, TREE_OPERAND (arg0, 1)))
        {
          tree arg2 = TREE_OPERAND (t, 2);
          enum tree_code comp_code = TREE_CODE (arg0);

          STRIP_NOPS (arg2);

          /* If we have A op 0 ? A : -A, this is A, -A, abs (A),
             or abs (-A), depending on the comparison operation.  */
          if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
               ? real_zerop (TREE_OPERAND (arg0, 1))
               : integer_zerop (TREE_OPERAND (arg0, 1)))
              && TREE_CODE (arg2) == NEGATE_EXPR
              && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
            switch (comp_code)
              {
              case EQ_EXPR:
                return
                  pedantic_non_lvalue
                    (convert (type,
                              negate_expr
                                (convert (TREE_TYPE (TREE_OPERAND (t, 1)),
                                          arg1))));

              case NE_EXPR:
                return pedantic_non_lvalue (convert (type, arg1));
              case GE_EXPR:
              case GT_EXPR:
                if (TREE_UNSIGNED (TREE_TYPE (arg1)))
                  arg1 = convert (signed_type (TREE_TYPE (arg1)), arg1);
                return pedantic_non_lvalue
                  (convert (type, fold (build1 (ABS_EXPR,
                                                TREE_TYPE (arg1), arg1))));
              case LE_EXPR:
              case LT_EXPR:
                if (TREE_UNSIGNED (TREE_TYPE (arg1)))
                  arg1 = convert (signed_type (TREE_TYPE (arg1)), arg1);
                return pedantic_non_lvalue
                  (negate_expr (convert (type,
                                         fold (build1 (ABS_EXPR,
                                                       TREE_TYPE (arg1),
                                                       arg1)))));
              default:
                abort ();
              }
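          /* For example, "x >= 0 ? x : -x" folds to abs (x) here, and
             "x < 0 ? x : -x" folds to -abs (x).  */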

          /* If this is A != 0 ? A : 0, this is simply A.  For ==, it is
             always zero.  */

          if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
            {
              if (comp_code == NE_EXPR)
                return pedantic_non_lvalue (convert (type, arg1));
              else if (comp_code == EQ_EXPR)
                return pedantic_non_lvalue (convert (type, integer_zero_node));
            }

          /* If this is A op B ? A : B, this is either A, B, min (A, B),
             or max (A, B), depending on the operation.  */

          if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
                                              arg2, TREE_OPERAND (arg0, 0)))
            {
              tree comp_op0 = TREE_OPERAND (arg0, 0);
              tree comp_op1 = TREE_OPERAND (arg0, 1);
              tree comp_type = TREE_TYPE (comp_op0);

              /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
              if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
                comp_type = type;

              switch (comp_code)
                {
                case EQ_EXPR:
                  return pedantic_non_lvalue (convert (type, arg2));
                case NE_EXPR:
                  return pedantic_non_lvalue (convert (type, arg1));
                case LE_EXPR:
                case LT_EXPR:
                  /* In C++ a ?: expression can be an lvalue, so put the
                     operand which will be used if they are equal first
                     so that we can convert this back to the
                     corresponding COND_EXPR.  */
                  return pedantic_non_lvalue
                    (convert (type, fold (build (MIN_EXPR, comp_type,
                                                 (comp_code == LE_EXPR
                                                  ? comp_op0 : comp_op1),
                                                 (comp_code == LE_EXPR
                                                  ? comp_op1 : comp_op0)))));
                  break;
                case GE_EXPR:
                case GT_EXPR:
                  return pedantic_non_lvalue
                    (convert (type, fold (build (MAX_EXPR, comp_type,
                                                 (comp_code == GE_EXPR
                                                  ? comp_op0 : comp_op1),
                                                 (comp_code == GE_EXPR
                                                  ? comp_op1 : comp_op0)))));
                  break;
                default:
                  abort ();
                }
            }
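          /* Thus "a < b ? a : b" folds to min (a, b) and "a > b ? a : b"
             folds to max (a, b).  */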

          /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
             we might still be able to simplify this.  For example,
             if C1 is one less or one more than C2, this might have started
             out as a MIN or MAX and been transformed by this function.
             Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

          if (INTEGRAL_TYPE_P (type)
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
              && TREE_CODE (arg2) == INTEGER_CST)
            switch (comp_code)
              {
              case EQ_EXPR:
                /* We can replace A with C1 in this case.  */
                arg1 = convert (type, TREE_OPERAND (arg0, 1));
                t = build (code, type, TREE_OPERAND (t, 0), arg1,
                           TREE_OPERAND (t, 2));
                break;

              case LT_EXPR:
                /* If C1 is C2 + 1, this is min(A, C2).  */
                if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
                    && operand_equal_p (TREE_OPERAND (arg0, 1),
                                        const_binop (PLUS_EXPR, arg2,
                                                     integer_one_node, 0), 1))
                  return pedantic_non_lvalue
                    (fold (build (MIN_EXPR, type, arg1, arg2)));
                break;

              case LE_EXPR:
                /* If C1 is C2 - 1, this is min(A, C2).  */
                if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
                    && operand_equal_p (TREE_OPERAND (arg0, 1),
                                        const_binop (MINUS_EXPR, arg2,
                                                     integer_one_node, 0), 1))
                  return pedantic_non_lvalue
                    (fold (build (MIN_EXPR, type, arg1, arg2)));
                break;

              case GT_EXPR:
                /* If C1 is C2 - 1, this is max(A, C2).  */
                if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
                    && operand_equal_p (TREE_OPERAND (arg0, 1),
                                        const_binop (MINUS_EXPR, arg2,
                                                     integer_one_node, 0), 1))
                  return pedantic_non_lvalue
                    (fold (build (MAX_EXPR, type, arg1, arg2)));
                break;

              case GE_EXPR:
                /* If C1 is C2 + 1, this is max(A, C2).  */
                if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
                    && operand_equal_p (TREE_OPERAND (arg0, 1),
                                        const_binop (PLUS_EXPR, arg2,
                                                     integer_one_node, 0), 1))
                  return pedantic_non_lvalue
                    (fold (build (MAX_EXPR, type, arg1, arg2)));
                break;
              case NE_EXPR:
                break;
              default:
                abort ();
              }
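          /* For instance, "a < 4 ? a : 3" becomes min (a, 3) here, since
             "a < 4" and "a <= 3" select the same arm; likewise
             "a > 3 ? a : 4" becomes max (a, 4).  */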
        }

      /* If the second operand is simpler than the third, swap them
         since that produces better jump optimization results.  */
      if ((TREE_CONSTANT (arg1) || DECL_P (arg1)
           || TREE_CODE (arg1) == SAVE_EXPR)
          && ! (TREE_CONSTANT (TREE_OPERAND (t, 2))
                || DECL_P (TREE_OPERAND (t, 2))
                || TREE_CODE (TREE_OPERAND (t, 2)) == SAVE_EXPR))
        {
          /* See if this can be inverted.  If it can't, possibly because
             it was a floating-point inequality comparison, don't do
             anything.  */
          tem = invert_truthvalue (arg0);

          if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
            {
              t = build (code, type, tem,
                         TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
              arg0 = tem;
              /* arg1 should be the first argument of the new T.  */
              arg1 = TREE_OPERAND (t, 1);
              STRIP_NOPS (arg1);
            }
        }

      /* Convert A ? 1 : 0 to simply A.  */
      if (integer_onep (TREE_OPERAND (t, 1))
          && integer_zerop (TREE_OPERAND (t, 2))
          /* If we try to convert TREE_OPERAND (t, 0) to our type, the
             call to fold will try to move the conversion inside
             a COND, which will recurse.  In that case, the COND_EXPR
             is probably the best choice, so leave it alone.  */
          && type == TREE_TYPE (arg0))
        return pedantic_non_lvalue (arg0);

      /* Look for expressions of the form A & 2 ? 2 : 0.  The result of
         this operation is simply A & 2.  */

      if (integer_zerop (TREE_OPERAND (t, 2))
          && TREE_CODE (arg0) == NE_EXPR
          && integer_zerop (TREE_OPERAND (arg0, 1))
          && integer_pow2p (arg1)
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              arg1, 1))
        return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));

      return t;

    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
         nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || pedantic)
        return t;
      /* Don't let (0, 0) be a null pointer constant.  */
      if (integer_zerop (arg1))
        return build1 (NOP_EXPR, type, arg1);
      return convert (type, arg1);
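      /* E.g. "(0, 0)" folds to 0, but wrapped in a NOP_EXPR so that the
         result is not itself treated as a null pointer constant.  */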

    case COMPLEX_EXPR:
      if (wins)
        return build_complex (type, arg0, arg1);
      return t;

    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return t;
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand (type, TREE_OPERAND (arg0, 0),
                                 TREE_OPERAND (arg0, 1));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
        return TREE_REALPART (arg0);
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        return fold (build (TREE_CODE (arg0), type,
                            fold (build1 (REALPART_EXPR, type,
                                          TREE_OPERAND (arg0, 0))),
                            fold (build1 (REALPART_EXPR,
                                          type, TREE_OPERAND (arg0, 1)))));
      return t;

    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return convert (type, integer_zero_node);
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand (type, TREE_OPERAND (arg0, 1),
                                 TREE_OPERAND (arg0, 0));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
        return TREE_IMAGPART (arg0);
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        return fold (build (TREE_CODE (arg0), type,
                            fold (build1 (IMAGPART_EXPR, type,
                                          TREE_OPERAND (arg0, 0))),
                            fold (build1 (IMAGPART_EXPR, type,
                                          TREE_OPERAND (arg0, 1)))));
      return t;
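      /* Both cases distribute the part operation over addition and
         subtraction, so e.g. __real__ (a + b) becomes
         __real__ a + __real__ b.  */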

      /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
         appropriate.  */
    case CLEANUP_POINT_EXPR:
      if (! has_cleanups (arg0))
        return TREE_OPERAND (t, 0);

      {
        enum tree_code code0 = TREE_CODE (arg0);
        int kind0 = TREE_CODE_CLASS (code0);
        tree arg00 = TREE_OPERAND (arg0, 0);
        tree arg01;

        if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
          return fold (build1 (code0, type,
                               fold (build1 (CLEANUP_POINT_EXPR,
                                             TREE_TYPE (arg00), arg00))));

        if (kind0 == '<' || kind0 == '2'
            || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
            || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
            || code0 == TRUTH_XOR_EXPR)
          {
            arg01 = TREE_OPERAND (arg0, 1);

            if (TREE_CONSTANT (arg00)
                || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
                    && ! has_cleanups (arg00)))
              return fold (build (code0, type, arg00,
                                  fold (build1 (CLEANUP_POINT_EXPR,
                                                TREE_TYPE (arg01), arg01))));

            if (TREE_CONSTANT (arg01))
              return fold (build (code0, type,
                                  fold (build1 (CLEANUP_POINT_EXPR,
                                                TREE_TYPE (arg00), arg00)),
                                  arg01));
          }

        return t;
      }
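      /* E.g. for a unary operator the cleanup point migrates inward:
         CLEANUP_POINT_EXPR (-X) is rebuilt as -(CLEANUP_POINT_EXPR (X)),
         giving the negation a chance to fold with its surroundings.  */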

    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
          && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (expr, 0), 0))
              == FUNCTION_DECL)
          && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
        {
          tree tmp = fold_builtin (expr);
          if (tmp)
            return tmp;
        }
      return t;
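      /* This gives fold_builtin a chance to simplify the call; e.g. a
         call such as strlen applied to a constant string can fold to a
         constant.  */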

    default:
      return t;
    } /* switch (code) */
}

/* Determine if the first argument is a multiple of the second argument.
   Return 0 if it is not, or if we cannot easily determine that it is.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */

static int
multiple_of_p (type, top, bottom)
     tree type;
     tree top;
     tree bottom;
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
        {
          tree op1, t1;

          op1 = TREE_OPERAND (top, 1);
          /* const_binop may not detect overflow correctly,
             so check for it explicitly here.  */
          if (TYPE_PRECISION (TREE_TYPE (size_one_node))
              > TREE_INT_CST_LOW (op1)
              && TREE_INT_CST_HIGH (op1) == 0
              && 0 != (t1 = convert (type,
                                     const_binop (LSHIFT_EXPR, size_one_node,
                                                  op1, 0)))
              && ! TREE_OVERFLOW (t1))
            return multiple_of_p (type, t1, bottom);
        }
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral
         type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
          || (TYPE_PRECISION (type)
              < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
        return 0;

      /* ... fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
          || (TREE_UNSIGNED (type)
              && (tree_int_cst_sgn (top) < 0
                  || tree_int_cst_sgn (bottom) < 0)))
        return 0;
      return integer_zerop (const_binop (TRUNC_MOD_EXPR,
                                         top, bottom, 0));

    default:
      return 0;
    }
}
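/* For example, multiple_of_p returns 1 for TOP = (J * 8) + 16 and
   BOTTOM = 8: PLUS_EXPR requires both operands to be multiples, J * 8
   qualifies because one operand of the MULT_EXPR equals BOTTOM, and 16
   qualifies because 16 % 8 == 0.  */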

/* Return true if `t' is known to be non-negative.  */

int
tree_expr_nonnegative_p (t)
     tree t;
{
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;
    case COND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
        && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
    case BIND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case RTL_EXPR:
      return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));

    default:
      if (truth_value_p (TREE_CODE (t)))
        /* Truth values evaluate to 0 or 1, which is nonnegative.  */
        return 1;
      else
        /* We don't know the sign of `t', so be conservative and
           return false.  */
        return 0;
    }
}
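/* For example, "p ? 3 : 7" is known to be nonnegative because both arms
   are nonnegative INTEGER_CSTs, whereas a plain variable reference falls
   through to the conservative default.  */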

/* Return true if `r' is known to be non-negative.
   Only handles constants at the moment.  */

int
rtl_expr_nonnegative_p (r)
     rtx r;
{
  switch (GET_CODE (r))
    {
    case CONST_INT:
      return INTVAL (r) >= 0;

    case CONST_DOUBLE:
      if (GET_MODE (r) == VOIDmode)
        return CONST_DOUBLE_HIGH (r) >= 0;
      return 0;

    case SYMBOL_REF:
    case LABEL_REF:
      /* These are always nonnegative.  */
      return 1;

    default:
      return 0;
    }
}