1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision etc. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
29
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type.
32
33 fold takes a tree as argument and returns a simplified tree.
34
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
38
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
41
42 force_fit_type takes a constant and a prior overflow indicator, and
43 forces the value to fit the type. It returns an overflow indicator. */
44
45 #include "config.h"
46 #include "system.h"
47 #include "flags.h"
48 #include "tree.h"
49 #include "real.h"
50 #include "rtl.h"
51 #include "expr.h"
52 #include "tm_p.h"
53 #include "toplev.h"
54 #include "ggc.h"
55 #include "hashtab.h"
56 #include "langhooks.h"
57
58 static void encode PARAMS ((HOST_WIDE_INT *,
59 unsigned HOST_WIDE_INT,
60 HOST_WIDE_INT));
61 static void decode PARAMS ((HOST_WIDE_INT *,
62 unsigned HOST_WIDE_INT *,
63 HOST_WIDE_INT *));
64 static tree negate_expr PARAMS ((tree));
65 static tree split_tree PARAMS ((tree, enum tree_code, tree *, tree *,
66 tree *, int));
67 static tree associate_trees PARAMS ((tree, tree, enum tree_code, tree));
68 static tree int_const_binop PARAMS ((enum tree_code, tree, tree, int));
69 static tree const_binop PARAMS ((enum tree_code, tree, tree, int));
70 static hashval_t size_htab_hash PARAMS ((const void *));
71 static int size_htab_eq PARAMS ((const void *, const void *));
72 static tree fold_convert PARAMS ((tree, tree));
73 static enum tree_code invert_tree_comparison PARAMS ((enum tree_code));
74 static enum tree_code swap_tree_comparison PARAMS ((enum tree_code));
75 static int truth_value_p PARAMS ((enum tree_code));
76 static int operand_equal_for_comparison_p PARAMS ((tree, tree, tree));
77 static int twoval_comparison_p PARAMS ((tree, tree *, tree *, int *));
78 static tree eval_subst PARAMS ((tree, tree, tree, tree, tree));
79 static tree omit_one_operand PARAMS ((tree, tree, tree));
80 static tree pedantic_omit_one_operand PARAMS ((tree, tree, tree));
81 static tree distribute_bit_expr PARAMS ((enum tree_code, tree, tree, tree));
82 static tree make_bit_field_ref PARAMS ((tree, tree, int, int, int));
83 static tree optimize_bit_field_compare PARAMS ((enum tree_code, tree,
84 tree, tree));
85 static tree decode_field_reference PARAMS ((tree, HOST_WIDE_INT *,
86 HOST_WIDE_INT *,
87 enum machine_mode *, int *,
88 int *, tree *, tree *));
89 static int all_ones_mask_p PARAMS ((tree, int));
90 static tree sign_bit_p PARAMS ((tree, tree));
91 static int simple_operand_p PARAMS ((tree));
92 static tree range_binop PARAMS ((enum tree_code, tree, tree, int,
93 tree, int));
94 static tree make_range PARAMS ((tree, int *, tree *, tree *));
95 static tree build_range_check PARAMS ((tree, tree, int, tree, tree));
96 static int merge_ranges PARAMS ((int *, tree *, tree *, int, tree, tree,
97 int, tree, tree));
98 static tree fold_range_test PARAMS ((tree));
99 static tree unextend PARAMS ((tree, int, int, tree));
100 static tree fold_truthop PARAMS ((enum tree_code, tree, tree, tree));
101 static tree optimize_minmax_comparison PARAMS ((tree));
102 static tree extract_muldiv PARAMS ((tree, tree, enum tree_code, tree));
103 static tree strip_compound_expr PARAMS ((tree, tree));
104 static int multiple_of_p PARAMS ((tree, tree, tree));
105 static tree constant_boolean_node PARAMS ((int, tree));
106 static int count_cond PARAMS ((tree, int));
107 static tree fold_binary_op_with_conditional_arg
108 PARAMS ((enum tree_code, tree, tree, tree, int));
109 static bool fold_real_zero_addition_p PARAMS ((tree, tree, int));
110
111 #if defined(HOST_EBCDIC)
112 /* bit 8 is significant in EBCDIC */
113 #define CHARMASK 0xff
114 #else
115 #define CHARMASK 0x7f
116 #endif
117
118 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
119 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
120 and SUM1. Then this yields nonzero if overflow occurred during the
121 addition.
122
123 Overflow occurs if A and B have the same sign, but A and SUM differ in
124 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
125 sign. */
126 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
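
/* An illustrative sketch, not part of the compiler proper: compute a
   wrapped two's complement sum with unsigned arithmetic (which is well
   defined and matches the "ignoring overflow" premise above) and then
   apply OVERFLOW_SUM_SIGN to it.  The helper name
   `example_add_overflows' is ours.  Adding two large positive values
   whose sum wraps negative makes the macro return nonzero.  */

static int
example_add_overflows (a, b)
     HOST_WIDE_INT a, b;
{
  HOST_WIDE_INT sum = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) a
				       + (unsigned HOST_WIDE_INT) b);

  return OVERFLOW_SUM_SIGN (a, b, sum);
}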
127 \f
128 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
129 We do that by representing the two-word integer in 4 words, with only
130 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
131 number. The value of each word is then LOWPART + HIGHPART * BASE. */
132
133 #define LOWPART(x) \
134 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
135 #define HIGHPART(x) \
136 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
137 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
138
139 /* Unpack a two-word integer into 4 words.
140 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
141 WORDS points to the array of HOST_WIDE_INTs. */
142
143 static void
144 encode (words, low, hi)
145 HOST_WIDE_INT *words;
146 unsigned HOST_WIDE_INT low;
147 HOST_WIDE_INT hi;
148 {
149 words[0] = LOWPART (low);
150 words[1] = HIGHPART (low);
151 words[2] = LOWPART (hi);
152 words[3] = HIGHPART (hi);
153 }
154
155 /* Pack an array of 4 words into a two-word integer.
156 WORDS points to the array of words.
157 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
158
159 static void
160 decode (words, low, hi)
161 HOST_WIDE_INT *words;
162 unsigned HOST_WIDE_INT *low;
163 HOST_WIDE_INT *hi;
164 {
165 *low = words[0] + words[1] * BASE;
166 *hi = words[2] + words[3] * BASE;
167 }
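
/* An illustrative sketch (ours, not used anywhere in this file): a
   round trip through encode and decode.  Assuming a 32-bit
   HOST_WIDE_INT, BASE is 0x10000, so low = 0x12345678 unpacks into
   the digits 0x5678 and 0x1234, and decode reassembles each half as
   words[0] + words[1] * BASE.  */

static void
example_encode_decode ()
{
  HOST_WIDE_INT words[4];
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;

  encode (words, (unsigned HOST_WIDE_INT) 0x12345678, (HOST_WIDE_INT) 0xABCD);
  decode (words, &low, &hi);
  /* Now low == 0x12345678 and hi == 0xABCD again.  */
}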
168 \f
169 /* Make the integer constant T valid for its type by setting to 0 or 1 all
170 the bits in the constant that don't belong in the type.
171
172 Return 1 if a signed overflow occurs, 0 otherwise. If OVERFLOW is
173 nonzero, a signed overflow has already occurred in calculating T, so
174 propagate it.
175
176 Make the real constant T valid for its type by calling CHECK_FLOAT_VALUE,
177 if it exists. */
178
179 int
180 force_fit_type (t, overflow)
181 tree t;
182 int overflow;
183 {
184 unsigned HOST_WIDE_INT low;
185 HOST_WIDE_INT high;
186 unsigned int prec;
187
188 if (TREE_CODE (t) == REAL_CST)
189 {
190 #ifdef CHECK_FLOAT_VALUE
191 CHECK_FLOAT_VALUE (TYPE_MODE (TREE_TYPE (t)), TREE_REAL_CST (t),
192 overflow);
193 #endif
194 return overflow;
195 }
196
197 else if (TREE_CODE (t) != INTEGER_CST)
198 return overflow;
199
200 low = TREE_INT_CST_LOW (t);
201 high = TREE_INT_CST_HIGH (t);
202
203 if (POINTER_TYPE_P (TREE_TYPE (t)))
204 prec = POINTER_SIZE;
205 else
206 prec = TYPE_PRECISION (TREE_TYPE (t));
207
208 /* First clear all bits that are beyond the type's precision. */
209
210 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
211 ;
212 else if (prec > HOST_BITS_PER_WIDE_INT)
213 TREE_INT_CST_HIGH (t)
214 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
215 else
216 {
217 TREE_INT_CST_HIGH (t) = 0;
218 if (prec < HOST_BITS_PER_WIDE_INT)
219 TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
220 }
221
222 /* Unsigned types do not suffer sign extension or overflow unless they
223 are a sizetype. */
224 if (TREE_UNSIGNED (TREE_TYPE (t))
225 && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
226 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
227 return overflow;
228
229 /* If the value's sign bit is set, extend the sign. */
230 if (prec != 2 * HOST_BITS_PER_WIDE_INT
231 && (prec > HOST_BITS_PER_WIDE_INT
232 ? 0 != (TREE_INT_CST_HIGH (t)
233 & ((HOST_WIDE_INT) 1
234 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
235 : 0 != (TREE_INT_CST_LOW (t)
236 & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
237 {
238 /* Value is negative:
239 set to 1 all the bits that are outside this type's precision. */
240 if (prec > HOST_BITS_PER_WIDE_INT)
241 TREE_INT_CST_HIGH (t)
242 |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
243 else
244 {
245 TREE_INT_CST_HIGH (t) = -1;
246 if (prec < HOST_BITS_PER_WIDE_INT)
247 TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
248 }
249 }
250
251 /* Return nonzero if signed overflow occurred. */
252 return
253 ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
254 != 0);
255 }
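
/* An illustrative sketch (ours): the masking and sign extension done
   above, specialized to a signed 8-bit precision on a bare
   HOST_WIDE_INT instead of a tree.  For val = 0x1FF the bits beyond
   the precision are cleared, leaving 0xFF; bit 7 is then set, so the
   high bits are set again and the result reads back as -1.  */

static HOST_WIDE_INT
example_fit_signed_8bit (val)
     HOST_WIDE_INT val;
{
  /* Clear all bits beyond the 8-bit precision.  */
  val &= ~((unsigned HOST_WIDE_INT) (-1) << 8);

  /* If the value's sign bit (bit 7) is set, extend the sign.  */
  if (val & ((unsigned HOST_WIDE_INT) 1 << 7))
    val |= (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) (-1) << 8);

  return val;
}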
256 \f
257 /* Add two doubleword integers with doubleword result.
258 Each argument is given as two `HOST_WIDE_INT' pieces.
259 One argument is L1 and H1; the other, L2 and H2.
260 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
261
262 int
263 add_double (l1, h1, l2, h2, lv, hv)
264 unsigned HOST_WIDE_INT l1, l2;
265 HOST_WIDE_INT h1, h2;
266 unsigned HOST_WIDE_INT *lv;
267 HOST_WIDE_INT *hv;
268 {
269 unsigned HOST_WIDE_INT l;
270 HOST_WIDE_INT h;
271
272 l = l1 + l2;
273 h = h1 + h2 + (l < l1);
274
275 *lv = l;
276 *hv = h;
277 return OVERFLOW_SUM_SIGN (h1, h2, h);
278 }
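
/* An illustrative usage sketch (ours): adding (-1, 0) + (1, 0).  The
   low half wraps to zero, so the (l < l1) test above supplies the
   carry and the result is (0, 1); no signed overflow is reported,
   since the high halves 0, 0 and 1 all agree in sign.  */

static int
example_add_double_carry ()
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;

  return add_double ((unsigned HOST_WIDE_INT) -1, (HOST_WIDE_INT) 0,
		     (unsigned HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
		     &lv, &hv);
}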
279
280 /* Negate a doubleword integer with doubleword result.
281 Return nonzero if the operation overflows, assuming it's signed.
282 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
283 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
284
285 int
286 neg_double (l1, h1, lv, hv)
287 unsigned HOST_WIDE_INT l1;
288 HOST_WIDE_INT h1;
289 unsigned HOST_WIDE_INT *lv;
290 HOST_WIDE_INT *hv;
291 {
292 if (l1 == 0)
293 {
294 *lv = 0;
295 *hv = - h1;
296 return (*hv & h1) < 0;
297 }
298 else
299 {
300 *lv = -l1;
301 *hv = ~h1;
302 return 0;
303 }
304 }
305 \f
306 /* Multiply two doubleword integers with doubleword result.
307 Return nonzero if the operation overflows, assuming it's signed.
308 Each argument is given as two `HOST_WIDE_INT' pieces.
309 One argument is L1 and H1; the other, L2 and H2.
310 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
311
312 int
313 mul_double (l1, h1, l2, h2, lv, hv)
314 unsigned HOST_WIDE_INT l1, l2;
315 HOST_WIDE_INT h1, h2;
316 unsigned HOST_WIDE_INT *lv;
317 HOST_WIDE_INT *hv;
318 {
319 HOST_WIDE_INT arg1[4];
320 HOST_WIDE_INT arg2[4];
321 HOST_WIDE_INT prod[4 * 2];
322 unsigned HOST_WIDE_INT carry;
323 int i, j, k;
324 unsigned HOST_WIDE_INT toplow, neglow;
325 HOST_WIDE_INT tophigh, neghigh;
326
327 encode (arg1, l1, h1);
328 encode (arg2, l2, h2);
329
330 memset ((char *) prod, 0, sizeof prod);
331
332 for (i = 0; i < 4; i++)
333 {
334 carry = 0;
335 for (j = 0; j < 4; j++)
336 {
337 k = i + j;
338 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
339 carry += arg1[i] * arg2[j];
340 /* Since prod[k] <= 0xFFFF, this sum <= 0xFFFFFFFF. */
341 carry += prod[k];
342 prod[k] = LOWPART (carry);
343 carry = HIGHPART (carry);
344 }
345 prod[i + 4] = carry;
346 }
347
348 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
349
350 /* Check for overflow by calculating the top half of the answer in full;
351 it should agree with the low half's sign bit. */
352 decode (prod + 4, &toplow, &tophigh);
353 if (h1 < 0)
354 {
355 neg_double (l2, h2, &neglow, &neghigh);
356 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
357 }
358 if (h2 < 0)
359 {
360 neg_double (l1, h1, &neglow, &neghigh);
361 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
362 }
363 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
364 }
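
/* An illustrative sketch (ours): the same schoolbook loop as above,
   transplanted to base 10.  A and B hold four decimal digits each,
   least significant first, and PROD receives the eight-digit product.
   mul_double runs exactly this loop with
   BASE = 2**(HOST_BITS_PER_WIDE_INT/2) in place of 10.  */

static void
example_schoolbook_multiply (a, b, prod)
     const int *a;		/* a[4], digits 0..9 */
     const int *b;		/* b[4], digits 0..9 */
     int *prod;			/* prod[8] */
{
  int i, j, carry;

  for (i = 0; i < 8; i++)
    prod[i] = 0;

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
	{
	  carry += a[i] * b[j] + prod[i + j];
	  prod[i + j] = carry % 10;
	  carry /= 10;
	}
      prod[i + 4] = carry;
    }
}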
365 \f
366 /* Shift the doubleword integer in L1, H1 left by COUNT places
367 keeping only PREC bits of result.
368 Shift right if COUNT is negative.
369 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
370 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
371
372 void
373 lshift_double (l1, h1, count, prec, lv, hv, arith)
374 unsigned HOST_WIDE_INT l1;
375 HOST_WIDE_INT h1, count;
376 unsigned int prec;
377 unsigned HOST_WIDE_INT *lv;
378 HOST_WIDE_INT *hv;
379 int arith;
380 {
381 unsigned HOST_WIDE_INT signmask;
382
383 if (count < 0)
384 {
385 rshift_double (l1, h1, -count, prec, lv, hv, arith);
386 return;
387 }
388
389 #ifdef SHIFT_COUNT_TRUNCATED
390 if (SHIFT_COUNT_TRUNCATED)
391 count %= prec;
392 #endif
393
394 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
395 {
396 /* Shifting by the host word size is undefined according to the
397 ANSI standard, so we must handle this as a special case. */
398 *hv = 0;
399 *lv = 0;
400 }
401 else if (count >= HOST_BITS_PER_WIDE_INT)
402 {
403 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
404 *lv = 0;
405 }
406 else
407 {
408 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
409 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
410 *lv = l1 << count;
411 }
412
413 /* Sign extend all bits that are beyond the precision. */
414
415 signmask = -((prec > HOST_BITS_PER_WIDE_INT
416 ? ((unsigned HOST_WIDE_INT) *hv
417 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
418 : (*lv >> (prec - 1))) & 1);
419
420 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
421 ;
422 else if (prec >= HOST_BITS_PER_WIDE_INT)
423 {
424 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
425 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
426 }
427 else
428 {
429 *hv = signmask;
430 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
431 *lv |= signmask << prec;
432 }
433 }
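
/* A clarifying note (ours) on the final case above: the low word's
   contribution to *HV is written as
     l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1
   rather than l1 >> (HOST_BITS_PER_WIDE_INT - count), because when
   COUNT is zero the latter would shift by the full word width, which
   C leaves undefined.  The two-step form is well defined for every
   COUNT in [0, HOST_BITS_PER_WIDE_INT - 1].  */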
434
435 /* Shift the doubleword integer in L1, H1 right by COUNT places
436 keeping only PREC bits of result. COUNT must be nonnegative.
437 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
438 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
439
440 void
441 rshift_double (l1, h1, count, prec, lv, hv, arith)
442 unsigned HOST_WIDE_INT l1;
443 HOST_WIDE_INT h1, count;
444 unsigned int prec;
445 unsigned HOST_WIDE_INT *lv;
446 HOST_WIDE_INT *hv;
447 int arith;
448 {
449 unsigned HOST_WIDE_INT signmask;
450
451 signmask = (arith
452 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
453 : 0);
454
455 #ifdef SHIFT_COUNT_TRUNCATED
456 if (SHIFT_COUNT_TRUNCATED)
457 count %= prec;
458 #endif
459
460 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
461 {
462 /* Shifting by the host word size is undefined according to the
463 ANSI standard, so we must handle this as a special case. */
464 *hv = 0;
465 *lv = 0;
466 }
467 else if (count >= HOST_BITS_PER_WIDE_INT)
468 {
469 *hv = 0;
470 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
471 }
472 else
473 {
474 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
475 *lv = ((l1 >> count)
476 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
477 }
478
479 /* Zero / sign extend all bits that are beyond the precision. */
480
481 if (count >= (HOST_WIDE_INT)prec)
482 {
483 *hv = signmask;
484 *lv = signmask;
485 }
486 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
487 ;
488 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
489 {
490 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
491 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
492 }
493 else
494 {
495 *hv = signmask;
496 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
497 *lv |= signmask << (prec - count);
498 }
499 }
500 \f
501 /* Rotate the doubleword integer in L1, H1 left by COUNT places
502 keeping only PREC bits of result.
503 Rotate right if COUNT is negative.
504 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
505
506 void
507 lrotate_double (l1, h1, count, prec, lv, hv)
508 unsigned HOST_WIDE_INT l1;
509 HOST_WIDE_INT h1, count;
510 unsigned int prec;
511 unsigned HOST_WIDE_INT *lv;
512 HOST_WIDE_INT *hv;
513 {
514 unsigned HOST_WIDE_INT s1l, s2l;
515 HOST_WIDE_INT s1h, s2h;
516
517 count %= prec;
518 if (count < 0)
519 count += prec;
520
521 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
522 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
523 *lv = s1l | s2l;
524 *hv = s1h | s2h;
525 }
526
527 /* Rotate the doubleword integer in L1, H1 right by COUNT places
528 keeping only PREC bits of result. Rotate left if COUNT is negative.
529 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
530
531 void
532 rrotate_double (l1, h1, count, prec, lv, hv)
533 unsigned HOST_WIDE_INT l1;
534 HOST_WIDE_INT h1, count;
535 unsigned int prec;
536 unsigned HOST_WIDE_INT *lv;
537 HOST_WIDE_INT *hv;
538 {
539 unsigned HOST_WIDE_INT s1l, s2l;
540 HOST_WIDE_INT s1h, s2h;
541
542 count %= prec;
543 if (count < 0)
544 count += prec;
545
546 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
547 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
548 *lv = s1l | s2l;
549 *hv = s1h | s2h;
550 }
551 \f
552 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
553 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
554 CODE is a tree code for a kind of division, one of
555 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
556 or EXACT_DIV_EXPR.
557 It controls how the quotient is rounded to an integer.
558 Return nonzero if the operation overflows.
559 UNS nonzero says do unsigned division. */
560
561 int
562 div_and_round_double (code, uns,
563 lnum_orig, hnum_orig, lden_orig, hden_orig,
564 lquo, hquo, lrem, hrem)
565 enum tree_code code;
566 int uns;
567 unsigned HOST_WIDE_INT lnum_orig; /* num == numerator == dividend */
568 HOST_WIDE_INT hnum_orig;
569 unsigned HOST_WIDE_INT lden_orig; /* den == denominator == divisor */
570 HOST_WIDE_INT hden_orig;
571 unsigned HOST_WIDE_INT *lquo, *lrem;
572 HOST_WIDE_INT *hquo, *hrem;
573 {
574 int quo_neg = 0;
575 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
576 HOST_WIDE_INT den[4], quo[4];
577 int i, j;
578 unsigned HOST_WIDE_INT work;
579 unsigned HOST_WIDE_INT carry = 0;
580 unsigned HOST_WIDE_INT lnum = lnum_orig;
581 HOST_WIDE_INT hnum = hnum_orig;
582 unsigned HOST_WIDE_INT lden = lden_orig;
583 HOST_WIDE_INT hden = hden_orig;
584 int overflow = 0;
585
586 if (hden == 0 && lden == 0)
587 overflow = 1, lden = 1;
588
589 /* calculate quotient sign and convert operands to unsigned. */
590 if (!uns)
591 {
592 if (hnum < 0)
593 {
594 quo_neg = ~ quo_neg;
595 /* (minimum integer) / (-1) is the only overflow case. */
596 if (neg_double (lnum, hnum, &lnum, &hnum)
597 && ((HOST_WIDE_INT) lden & hden) == -1)
598 overflow = 1;
599 }
600 if (hden < 0)
601 {
602 quo_neg = ~ quo_neg;
603 neg_double (lden, hden, &lden, &hden);
604 }
605 }
606
607 if (hnum == 0 && hden == 0)
608 { /* single precision */
609 *hquo = *hrem = 0;
610 /* This unsigned division rounds toward zero. */
611 *lquo = lnum / lden;
612 goto finish_up;
613 }
614
615 if (hnum == 0)
616 { /* trivial case: dividend < divisor */
617 /* hden != 0 already checked. */
618 *hquo = *lquo = 0;
619 *hrem = hnum;
620 *lrem = lnum;
621 goto finish_up;
622 }
623
624 memset ((char *) quo, 0, sizeof quo);
625
626 memset ((char *) num, 0, sizeof num); /* to zero the 5th (scaling) element */
627 memset ((char *) den, 0, sizeof den);
628
629 encode (num, lnum, hnum);
630 encode (den, lden, hden);
631
632 /* Special code for when the divisor < BASE. */
633 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
634 {
635 /* hnum != 0 already checked. */
636 for (i = 4 - 1; i >= 0; i--)
637 {
638 work = num[i] + carry * BASE;
639 quo[i] = work / lden;
640 carry = work % lden;
641 }
642 }
643 else
644 {
645 /* Full double precision division,
646 with thanks to Don Knuth's "Seminumerical Algorithms". */
647 int num_hi_sig, den_hi_sig;
648 unsigned HOST_WIDE_INT quo_est, scale;
649
650 /* Find the highest non-zero divisor digit. */
651 for (i = 4 - 1;; i--)
652 if (den[i] != 0)
653 {
654 den_hi_sig = i;
655 break;
656 }
657
658 /* Ensure that the first digit of the divisor is at least BASE/2.
659 This is required by the quotient digit estimation algorithm. */
660
661 scale = BASE / (den[den_hi_sig] + 1);
662 if (scale > 1)
663 { /* scale divisor and dividend */
664 carry = 0;
665 for (i = 0; i <= 4 - 1; i++)
666 {
667 work = (num[i] * scale) + carry;
668 num[i] = LOWPART (work);
669 carry = HIGHPART (work);
670 }
671
672 num[4] = carry;
673 carry = 0;
674 for (i = 0; i <= 4 - 1; i++)
675 {
676 work = (den[i] * scale) + carry;
677 den[i] = LOWPART (work);
678 carry = HIGHPART (work);
679 if (den[i] != 0) den_hi_sig = i;
680 }
681 }
682
683 num_hi_sig = 4;
684
685 /* Main loop */
686 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
687 {
688 /* Guess the next quotient digit, quo_est, by dividing the first
689 two remaining dividend digits by the high order divisor digit.
690 quo_est is never too low and is at most 2 too high. */
691 unsigned HOST_WIDE_INT tmp;
692
693 num_hi_sig = i + den_hi_sig + 1;
694 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
695 if (num[num_hi_sig] != den[den_hi_sig])
696 quo_est = work / den[den_hi_sig];
697 else
698 quo_est = BASE - 1;
699
700 /* Refine quo_est so it's usually correct, and at most one high. */
701 tmp = work - quo_est * den[den_hi_sig];
702 if (tmp < BASE
703 && (den[den_hi_sig - 1] * quo_est
704 > (tmp * BASE + num[num_hi_sig - 2])))
705 quo_est--;
706
707 /* Try QUO_EST as the quotient digit, by multiplying the
708 divisor by QUO_EST and subtracting from the remaining dividend.
709 Keep in mind that QUO_EST is digit I of the quotient. */
710
711 carry = 0;
712 for (j = 0; j <= den_hi_sig; j++)
713 {
714 work = quo_est * den[j] + carry;
715 carry = HIGHPART (work);
716 work = num[i + j] - LOWPART (work);
717 num[i + j] = LOWPART (work);
718 carry += HIGHPART (work) != 0;
719 }
720
721 /* If quo_est was high by one, then num[i] went negative and
722 we need to correct things. */
723 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
724 {
725 quo_est--;
726 carry = 0; /* add divisor back in */
727 for (j = 0; j <= den_hi_sig; j++)
728 {
729 work = num[i + j] + den[j] + carry;
730 carry = HIGHPART (work);
731 num[i + j] = LOWPART (work);
732 }
733
734 num [num_hi_sig] += carry;
735 }
736
737 /* Store the quotient digit. */
738 quo[i] = quo_est;
739 }
740 }
741
742 decode (quo, lquo, hquo);
743
744 finish_up:
745 /* if result is negative, make it so. */
746 if (quo_neg)
747 neg_double (*lquo, *hquo, lquo, hquo);
748
749 /* compute trial remainder: rem = num - (quo * den) */
750 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
751 neg_double (*lrem, *hrem, lrem, hrem);
752 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
753
754 switch (code)
755 {
756 case TRUNC_DIV_EXPR:
757 case TRUNC_MOD_EXPR: /* round toward zero */
758 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
759 return overflow;
760
761 case FLOOR_DIV_EXPR:
762 case FLOOR_MOD_EXPR: /* round toward negative infinity */
763 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
764 {
765 /* quo = quo - 1; */
766 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
767 lquo, hquo);
768 }
769 else
770 return overflow;
771 break;
772
773 case CEIL_DIV_EXPR:
774 case CEIL_MOD_EXPR: /* round toward positive infinity */
775 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
776 {
777 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
778 lquo, hquo);
779 }
780 else
781 return overflow;
782 break;
783
784 case ROUND_DIV_EXPR:
785 case ROUND_MOD_EXPR: /* round to closest integer */
786 {
787 unsigned HOST_WIDE_INT labs_rem = *lrem;
788 HOST_WIDE_INT habs_rem = *hrem;
789 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
790 HOST_WIDE_INT habs_den = hden, htwice;
791
792 /* Get absolute values */
793 if (*hrem < 0)
794 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
795 if (hden < 0)
796 neg_double (lden, hden, &labs_den, &habs_den);
797
798 /* If (2 * abs (rem) >= abs (den)) */
799 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
800 labs_rem, habs_rem, &ltwice, &htwice);
801
802 if (((unsigned HOST_WIDE_INT) habs_den
803 < (unsigned HOST_WIDE_INT) htwice)
804 || (((unsigned HOST_WIDE_INT) habs_den
805 == (unsigned HOST_WIDE_INT) htwice)
806 && (labs_den < ltwice)))
807 {
808 if (*hquo < 0)
809 /* quo = quo - 1; */
810 add_double (*lquo, *hquo,
811 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
812 else
813 /* quo = quo + 1; */
814 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
815 lquo, hquo);
816 }
817 else
818 return overflow;
819 }
820 break;
821
822 default:
823 abort ();
824 }
825
826 /* compute true remainder: rem = num - (quo * den) */
827 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
828 neg_double (*lrem, *hrem, lrem, hrem);
829 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
830 return overflow;
831 }
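
/* An illustrative worked example (ours), dividing -7 by 2 under each
   rounding mode handled above:

     TRUNC_DIV_EXPR   quo = -3, rem = -1   (toward zero)
     FLOOR_DIV_EXPR   quo = -4, rem =  1   (toward -infinity)
     CEIL_DIV_EXPR    quo = -3, rem = -1   (toward +infinity)
     ROUND_DIV_EXPR   quo = -4, rem =  1   (-3.5 rounds away from zero)

   In every mode the final values satisfy num == quo * den + rem.
   The FLOOR case as a call, with -7 given as the doubleword (-7, -1):  */

static void
example_floor_division ()
{
  unsigned HOST_WIDE_INT lquo, lrem;
  HOST_WIDE_INT hquo, hrem;

  div_and_round_double (FLOOR_DIV_EXPR, 0,
			(unsigned HOST_WIDE_INT) -7, (HOST_WIDE_INT) -1,
			(unsigned HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
			&lquo, &hquo, &lrem, &hrem);
  /* lquo == (unsigned HOST_WIDE_INT) -4, hquo == -1; lrem == 1, hrem == 0. */
}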
832 \f
833 /* Given T, an expression, return the negation of T. Allow for T to be
834 null, in which case return null. */
835
836 static tree
837 negate_expr (t)
838 tree t;
839 {
840 tree type;
841 tree tem;
842
843 if (t == 0)
844 return 0;
845
846 type = TREE_TYPE (t);
847 STRIP_SIGN_NOPS (t);
848
849 switch (TREE_CODE (t))
850 {
851 case INTEGER_CST:
852 case REAL_CST:
853 if (! TREE_UNSIGNED (type)
854 && 0 != (tem = fold (build1 (NEGATE_EXPR, type, t)))
855 && ! TREE_OVERFLOW (tem))
856 return tem;
857 break;
858
859 case NEGATE_EXPR:
860 return convert (type, TREE_OPERAND (t, 0));
861
862 case MINUS_EXPR:
863 /* - (A - B) -> B - A */
864 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
865 return convert (type,
866 fold (build (MINUS_EXPR, TREE_TYPE (t),
867 TREE_OPERAND (t, 1),
868 TREE_OPERAND (t, 0))));
869 break;
870
871 default:
872 break;
873 }
874
875 return convert (type, fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t)));
876 }
877 \f
878 /* Split a tree IN into constant, literal and variable parts that could be
879 combined with CODE to make IN. "constant" means an expression with
880 TREE_CONSTANT but that isn't an actual constant. CODE must be a
881 commutative arithmetic operation. Store the constant part into *CONP,
882 the literal in *LITP and return the variable part. If a part isn't
883 present, set it to null. If the tree does not decompose in this way,
884 return the entire tree as the variable part and the other parts as null.
885
886 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
887 case, we negate an operand that was subtracted, except for a literal,
888 which we store in *MINUS_LITP instead.
889
890 If NEGATE_P is true, we are negating all of IN, again except a literal
891 for which we use *MINUS_LITP instead.
892
893 If IN is itself a literal or constant, return it as appropriate.
894
895 Note that we do not guarantee that any of the three values will be the
896 same type as IN, but they will have the same signedness and mode. */
897
898 static tree
899 split_tree (in, code, conp, litp, minus_litp, negate_p)
900 tree in;
901 enum tree_code code;
902 tree *conp, *litp, *minus_litp;
903 int negate_p;
904 {
905 tree var = 0;
906
907 *conp = 0;
908 *litp = 0;
909 *minus_litp = 0;
910
911 /* Strip any conversions that don't change the machine mode or signedness. */
912 STRIP_SIGN_NOPS (in);
913
914 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
915 *litp = in;
916 else if (TREE_CODE (in) == code
917 || (! FLOAT_TYPE_P (TREE_TYPE (in))
918 /* We can associate addition and subtraction together (even
919 though the C standard doesn't say so) for integers because
920 the value is not affected. For reals, the value might be
921 affected, so we can't. */
922 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
923 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
924 {
925 tree op0 = TREE_OPERAND (in, 0);
926 tree op1 = TREE_OPERAND (in, 1);
927 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
928 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
929
930 /* First see if either of the operands is a literal, then a constant. */
931 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
932 *litp = op0, op0 = 0;
933 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
934 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
935
936 if (op0 != 0 && TREE_CONSTANT (op0))
937 *conp = op0, op0 = 0;
938 else if (op1 != 0 && TREE_CONSTANT (op1))
939 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
940
941 /* If we haven't dealt with either operand, this is not a case we can
942 decompose. Otherwise, VAR is either of the ones remaining, if any. */
943 if (op0 != 0 && op1 != 0)
944 var = in;
945 else if (op0 != 0)
946 var = op0;
947 else
948 var = op1, neg_var_p = neg1_p;
949
950 /* Now do any needed negations. */
951 if (neg_litp_p)
952 *minus_litp = *litp, *litp = 0;
953 if (neg_conp_p)
954 *conp = negate_expr (*conp);
955 if (neg_var_p)
956 var = negate_expr (var);
957 }
958 else if (TREE_CONSTANT (in))
959 *conp = in;
960 else
961 var = in;
962
963 if (negate_p)
964 {
965 if (*litp)
966 *minus_litp = *litp, *litp = 0;
967 else if (*minus_litp)
968 *litp = *minus_litp, *minus_litp = 0;
969 *conp = negate_expr (*conp);
970 var = negate_expr (var);
971 }
972
973 return var;
974 }
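
/* An illustrative worked example (ours): splitting IN = x - 3 with
   CODE = PLUS_EXPR.  The MINUS_EXPR is accepted because addition and
   subtraction associate for integers; OP1 = 3 is a literal that was
   subtracted, so it is stored in *MINUS_LITP, *CONP stays null, and
   the variable part returned is x.  A caller can then recombine the
   pieces with associate_trees below.  */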
975
976 /* Re-associate trees split by the above function. T1 and T2 are either
977 expressions to associate or null. Return the new expression, if any. If
978 we build an operation, do it in TYPE and with CODE. */
979
980 static tree
981 associate_trees (t1, t2, code, type)
982 tree t1, t2;
983 enum tree_code code;
984 tree type;
985 {
986 if (t1 == 0)
987 return t2;
988 else if (t2 == 0)
989 return t1;
990
991 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
992 try to fold this since we will have infinite recursion. But do
993 deal with any NEGATE_EXPRs. */
994 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
995 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
996 {
997 if (TREE_CODE (t1) == NEGATE_EXPR)
998 return build (MINUS_EXPR, type, convert (type, t2),
999 convert (type, TREE_OPERAND (t1, 0)));
1000 else if (TREE_CODE (t2) == NEGATE_EXPR)
1001 return build (MINUS_EXPR, type, convert (type, t1),
1002 convert (type, TREE_OPERAND (t2, 0)));
1003 else
1004 return build (code, type, convert (type, t1), convert (type, t2));
1005 }
1006
1007 return fold (build (code, type, convert (type, t1), convert (type, t2)));
1008 }
1009 \f
1010 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1011 to produce a new constant.
1012
1013 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1014
1015 static tree
1016 int_const_binop (code, arg1, arg2, notrunc)
1017 enum tree_code code;
1018 tree arg1, arg2;
1019 int notrunc;
1020 {
1021 unsigned HOST_WIDE_INT int1l, int2l;
1022 HOST_WIDE_INT int1h, int2h;
1023 unsigned HOST_WIDE_INT low;
1024 HOST_WIDE_INT hi;
1025 unsigned HOST_WIDE_INT garbagel;
1026 HOST_WIDE_INT garbageh;
1027 tree t;
1028 tree type = TREE_TYPE (arg1);
1029 int uns = TREE_UNSIGNED (type);
1030 int is_sizetype
1031 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1032 int overflow = 0;
1033 int no_overflow = 0;
1034
1035 int1l = TREE_INT_CST_LOW (arg1);
1036 int1h = TREE_INT_CST_HIGH (arg1);
1037 int2l = TREE_INT_CST_LOW (arg2);
1038 int2h = TREE_INT_CST_HIGH (arg2);
1039
1040 switch (code)
1041 {
1042 case BIT_IOR_EXPR:
1043 low = int1l | int2l, hi = int1h | int2h;
1044 break;
1045
1046 case BIT_XOR_EXPR:
1047 low = int1l ^ int2l, hi = int1h ^ int2h;
1048 break;
1049
1050 case BIT_AND_EXPR:
1051 low = int1l & int2l, hi = int1h & int2h;
1052 break;
1053
1054 case BIT_ANDTC_EXPR:
1055 low = int1l & ~int2l, hi = int1h & ~int2h;
1056 break;
1057
1058 case RSHIFT_EXPR:
1059 int2l = -int2l; /* ... fall through ... */
1060 case LSHIFT_EXPR:
1061 /* It's unclear from the C standard whether shifts can overflow.
1062 The following code ignores overflow; perhaps a C standard
1063 interpretation ruling is needed. */
1064 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1065 &low, &hi, !uns);
1066 no_overflow = 1;
1067 break;
1068
1069 case RROTATE_EXPR:
1070 int2l = -int2l; /* ... fall through ... */
1071 case LROTATE_EXPR:
1072 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1073 &low, &hi);
1074 break;
1075
1076 case PLUS_EXPR:
1077 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1078 break;
1079
1080 case MINUS_EXPR:
1081 neg_double (int2l, int2h, &low, &hi);
1082 add_double (int1l, int1h, low, hi, &low, &hi);
1083 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1084 break;
1085
1086 case MULT_EXPR:
1087 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1088 break;
1089
1090 case TRUNC_DIV_EXPR:
1091 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1092 case EXACT_DIV_EXPR:
1093 /* This is a shortcut for a common special case. */
1094 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1095 && ! TREE_CONSTANT_OVERFLOW (arg1)
1096 && ! TREE_CONSTANT_OVERFLOW (arg2)
1097 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1098 {
1099 if (code == CEIL_DIV_EXPR)
1100 int1l += int2l - 1;
1101
1102 low = int1l / int2l, hi = 0;
1103 break;
1104 }
1105
1106 /* ... fall through ... */
1107
1108 case ROUND_DIV_EXPR:
1109 if (int2h == 0 && int2l == 1)
1110 {
1111 low = int1l, hi = int1h;
1112 break;
1113 }
1114 if (int1l == int2l && int1h == int2h
1115 && ! (int1l == 0 && int1h == 0))
1116 {
1117 low = 1, hi = 0;
1118 break;
1119 }
1120 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1121 &low, &hi, &garbagel, &garbageh);
1122 break;
1123
1124 case TRUNC_MOD_EXPR:
1125 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1126 /* This is a shortcut for a common special case. */
1127 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1128 && ! TREE_CONSTANT_OVERFLOW (arg1)
1129 && ! TREE_CONSTANT_OVERFLOW (arg2)
1130 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1131 {
1132 if (code == CEIL_MOD_EXPR)
1133 int1l += int2l - 1;
1134 low = int1l % int2l, hi = 0;
1135 break;
1136 }
1137
1138 /* ... fall through ... */
1139
1140 case ROUND_MOD_EXPR:
1141 overflow = div_and_round_double (code, uns,
1142 int1l, int1h, int2l, int2h,
1143 &garbagel, &garbageh, &low, &hi);
1144 break;
1145
1146 case MIN_EXPR:
1147 case MAX_EXPR:
1148 if (uns)
1149 low = (((unsigned HOST_WIDE_INT) int1h
1150 < (unsigned HOST_WIDE_INT) int2h)
1151 || (((unsigned HOST_WIDE_INT) int1h
1152 == (unsigned HOST_WIDE_INT) int2h)
1153 && int1l < int2l));
1154 else
1155 low = (int1h < int2h
1156 || (int1h == int2h && int1l < int2l));
1157
1158 if (low == (code == MIN_EXPR))
1159 low = int1l, hi = int1h;
1160 else
1161 low = int2l, hi = int2h;
1162 break;
1163
1164 default:
1165 abort ();
1166 }
1167
1168 /* If this is for a sizetype, the value fits in one (signed)
1169 HOST_WIDE_INT word, and no overflow occurred, use size_int_type_wide
1170 since it caches constants. */
1171 if (is_sizetype
1172 && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
1173 || (hi == -1 && (HOST_WIDE_INT) low < 0))
1174 && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
1175 return size_int_type_wide (low, type);
1176 else
1177 {
1178 t = build_int_2 (low, hi);
1179 TREE_TYPE (t) = TREE_TYPE (arg1);
1180 }
1181
1182 TREE_OVERFLOW (t)
1183 = ((notrunc
1184 ? (!uns || is_sizetype) && overflow
1185 : (force_fit_type (t, (!uns || is_sizetype) && overflow)
1186 && ! no_overflow))
1187 | TREE_OVERFLOW (arg1)
1188 | TREE_OVERFLOW (arg2));
1189
1190 /* If we're doing a size calculation, unsigned arithmetic does overflow.
1191 So check if force_fit_type truncated the value. */
1192 if (is_sizetype
1193 && ! TREE_OVERFLOW (t)
1194 && (TREE_INT_CST_HIGH (t) != hi
1195 || TREE_INT_CST_LOW (t) != low))
1196 TREE_OVERFLOW (t) = 1;
1197
1198 TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
1199 | TREE_CONSTANT_OVERFLOW (arg1)
1200 | TREE_CONSTANT_OVERFLOW (arg2));
1201 return t;
1202 }
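
/* An illustrative usage sketch (ours): folding the constant sum 2 + 3
   into the single INTEGER_CST 5, following this file's own idiom of
   setting TREE_TYPE after build_int_2.  */

static tree
example_fold_constant_sum ()
{
  tree a = build_int_2 (2, 0);
  tree b = build_int_2 (3, 0);

  TREE_TYPE (a) = integer_type_node;
  TREE_TYPE (b) = integer_type_node;

  return int_const_binop (PLUS_EXPR, a, b, 0);
}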
1203
1204 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1205 constant. We assume ARG1 and ARG2 have the same data type, or at least
1206 are the same kind of constant and the same machine mode.
1207
1208 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1209
1210 static tree
1211 const_binop (code, arg1, arg2, notrunc)
1212 enum tree_code code;
1213 tree arg1, arg2;
1214 int notrunc;
1215 {
1216 STRIP_NOPS (arg1);
1217 STRIP_NOPS (arg2);
1218
1219 if (TREE_CODE (arg1) == INTEGER_CST)
1220 return int_const_binop (code, arg1, arg2, notrunc);
1221
1222 if (TREE_CODE (arg1) == REAL_CST)
1223 {
1224 REAL_VALUE_TYPE d1;
1225 REAL_VALUE_TYPE d2;
1226 REAL_VALUE_TYPE value;
1227 tree t;
1228
1229 d1 = TREE_REAL_CST (arg1);
1230 d2 = TREE_REAL_CST (arg2);
1231
1232 /* If either operand is a NaN, just return it. Otherwise, set up
1233 for floating-point trap; we return an overflow. */
1234 if (REAL_VALUE_ISNAN (d1))
1235 return arg1;
1236 else if (REAL_VALUE_ISNAN (d2))
1237 return arg2;
1238
1239 REAL_ARITHMETIC (value, code, d1, d2);
1240
1241 t = build_real (TREE_TYPE (arg1),
1242 real_value_truncate (TYPE_MODE (TREE_TYPE (arg1)),
1243 value));
1244
1245 TREE_OVERFLOW (t)
1246 = (force_fit_type (t, 0)
1247 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1248 TREE_CONSTANT_OVERFLOW (t)
1249 = TREE_OVERFLOW (t)
1250 | TREE_CONSTANT_OVERFLOW (arg1)
1251 | TREE_CONSTANT_OVERFLOW (arg2);
1252 return t;
1253 }
1254 if (TREE_CODE (arg1) == COMPLEX_CST)
1255 {
1256 tree type = TREE_TYPE (arg1);
1257 tree r1 = TREE_REALPART (arg1);
1258 tree i1 = TREE_IMAGPART (arg1);
1259 tree r2 = TREE_REALPART (arg2);
1260 tree i2 = TREE_IMAGPART (arg2);
1261 tree t;
1262
1263 switch (code)
1264 {
1265 case PLUS_EXPR:
1266 t = build_complex (type,
1267 const_binop (PLUS_EXPR, r1, r2, notrunc),
1268 const_binop (PLUS_EXPR, i1, i2, notrunc));
1269 break;
1270
1271 case MINUS_EXPR:
1272 t = build_complex (type,
1273 const_binop (MINUS_EXPR, r1, r2, notrunc),
1274 const_binop (MINUS_EXPR, i1, i2, notrunc));
1275 break;
1276
1277 case MULT_EXPR:
1278 t = build_complex (type,
1279 const_binop (MINUS_EXPR,
1280 const_binop (MULT_EXPR,
1281 r1, r2, notrunc),
1282 const_binop (MULT_EXPR,
1283 i1, i2, notrunc),
1284 notrunc),
1285 const_binop (PLUS_EXPR,
1286 const_binop (MULT_EXPR,
1287 r1, i2, notrunc),
1288 const_binop (MULT_EXPR,
1289 i1, r2, notrunc),
1290 notrunc));
1291 break;
1292
1293 case RDIV_EXPR:
1294 {
1295 tree magsquared
1296 = const_binop (PLUS_EXPR,
1297 const_binop (MULT_EXPR, r2, r2, notrunc),
1298 const_binop (MULT_EXPR, i2, i2, notrunc),
1299 notrunc);
1300
1301 t = build_complex (type,
1302 const_binop
1303 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1304 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1305 const_binop (PLUS_EXPR,
1306 const_binop (MULT_EXPR, r1, r2,
1307 notrunc),
1308 const_binop (MULT_EXPR, i1, i2,
1309 notrunc),
1310 notrunc),
1311 magsquared, notrunc),
1312 const_binop
1313 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1314 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1315 const_binop (MINUS_EXPR,
1316 const_binop (MULT_EXPR, i1, r2,
1317 notrunc),
1318 const_binop (MULT_EXPR, r1, i2,
1319 notrunc),
1320 notrunc),
1321 magsquared, notrunc));
1322 }
1323 break;
1324
1325 default:
1326 abort ();
1327 }
1328 return t;
1329 }
1330 return 0;
1331 }
1332
1333 /* These are the hash table functions for the hash table of INTEGER_CST
1334 nodes of a sizetype. */
1335
1336 /* Return the hash code for X, an INTEGER_CST. */
1337
1338 static hashval_t
1339 size_htab_hash (x)
1340 const void *x;
1341 {
1342 tree t = (tree) x;
1343
1344 return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
1345 ^ (hashval_t) ((long) TREE_TYPE (t) >> 3)
1346 ^ (TREE_OVERFLOW (t) << 20));
1347 }
1348
1349 /* Return non-zero if the value represented by *X (an INTEGER_CST tree node)
1350 is the same as that given by *Y, also an INTEGER_CST tree node. */
1351
1352 static int
1353 size_htab_eq (x, y)
1354 const void *x;
1355 const void *y;
1356 {
1357 tree xt = (tree) x;
1358 tree yt = (tree) y;
1359
1360 return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
1361 && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
1362 && TREE_TYPE (xt) == TREE_TYPE (yt)
1363 && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
1364 }
1365 \f
1366 /* Return an INTEGER_CST whose low-order HOST_BITS_PER_WIDE_INT bits
1367 are given by NUMBER, of the sizetype represented by KIND. */
1368
1369 tree
1370 size_int_wide (number, kind)
1371 HOST_WIDE_INT number;
1372 enum size_type_kind kind;
1373 {
1374 return size_int_type_wide (number, sizetype_tab[(int) kind]);
1375 }
1376
1377 /* Likewise, but the desired type is specified explicitly. */
1378
1379 static GTY (()) tree new_const;
1380 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
1381 htab_t size_htab;
1382
1383 tree
1384 size_int_type_wide (number, type)
1385 HOST_WIDE_INT number;
1386 tree type;
1387 {
1388 PTR *slot;
1389
1390 if (size_htab == 0)
1391 {
1392 size_htab = htab_create (1024, size_htab_hash, size_htab_eq, NULL);
1393 new_const = make_node (INTEGER_CST);
1394 }
1395
1396 /* Adjust NEW_CONST to be the constant we want. If it's already in the
1397 hash table, we return the value from the hash table. Otherwise, we
1398 place that in the hash table and make a new node for the next time. */
1399 TREE_INT_CST_LOW (new_const) = number;
1400 TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
1401 TREE_TYPE (new_const) = type;
1402 TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
1403 = force_fit_type (new_const, 0);
1404
1405 slot = htab_find_slot (size_htab, new_const, INSERT);
1406 if (*slot == 0)
1407 {
1408 tree t = new_const;
1409
1410 *slot = (PTR) new_const;
1411 new_const = make_node (INTEGER_CST);
1412 return t;
1413 }
1414 else
1415 return (tree) *slot;
1416 }
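
/* A clarifying note (ours): the scheme above is a hash-consing idiom.
   One scratch node, NEW_CONST, is kept preallocated; it is filled in
   and used to probe the table.  On a hit the cached node is returned
   and the scratch node is reused for the next call, so successful
   lookups allocate nothing.  Only on a miss does the scratch node
   enter the table, forcing a fresh scratch node to be made.  */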
1417
1418 /* Combine operands ARG0 and ARG1 with arithmetic operation CODE. CODE
1419 is a tree code. The type of the result is taken from the operands.
1420 Both must be the same integer type, and it must be a size type.
1421 If the operands are constant, so is the result. */
1422
1423 tree
1424 size_binop (code, arg0, arg1)
1425 enum tree_code code;
1426 tree arg0, arg1;
1427 {
1428 tree type = TREE_TYPE (arg0);
1429
1430 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1431 || type != TREE_TYPE (arg1))
1432 abort ();
1433
1434 /* Handle the special case of two integer constants faster. */
1435 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1436 {
1437 /* And some specific cases even faster than that. */
1438 if (code == PLUS_EXPR && integer_zerop (arg0))
1439 return arg1;
1440 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1441 && integer_zerop (arg1))
1442 return arg0;
1443 else if (code == MULT_EXPR && integer_onep (arg0))
1444 return arg1;
1445
1446 /* Handle general case of two integer constants. */
1447 return int_const_binop (code, arg0, arg1, 0);
1448 }
1449
1450 if (arg0 == error_mark_node || arg1 == error_mark_node)
1451 return error_mark_node;
1452
1453 return fold (build (code, type, arg0, arg1));
1454 }
1455
1456 /* Given two values, either both of sizetype or both of bitsizetype,
1457 compute the difference between the two values. Return the value
1458 in the signed type corresponding to the type of the operands. */
1459
1460 tree
1461 size_diffop (arg0, arg1)
1462 tree arg0, arg1;
1463 {
1464 tree type = TREE_TYPE (arg0);
1465 tree ctype;
1466
1467 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1468 || type != TREE_TYPE (arg1))
1469 abort ();
1470
1471 /* If the type is already signed, just do the simple thing. */
1472 if (! TREE_UNSIGNED (type))
1473 return size_binop (MINUS_EXPR, arg0, arg1);
1474
1475 ctype = (type == bitsizetype || type == ubitsizetype
1476 ? sbitsizetype : ssizetype);
1477
1478 /* If either operand is not a constant, do the conversions to the signed
1479 type and subtract. The hardware will do the right thing with any
1480 overflow in the subtraction. */
1481 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1482 return size_binop (MINUS_EXPR, convert (ctype, arg0),
1483 convert (ctype, arg1));
1484
1485 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1486 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1487 overflow) and negate (which can't either). Special-case a result
1488 of zero while we're here. */
1489 if (tree_int_cst_equal (arg0, arg1))
1490 return convert (ctype, integer_zero_node);
1491 else if (tree_int_cst_lt (arg1, arg0))
1492 return convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1493 else
1494 return size_binop (MINUS_EXPR, convert (ctype, integer_zero_node),
1495 convert (ctype, size_binop (MINUS_EXPR, arg1, arg0)));
1496 }
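
/* An illustrative worked example (ours): size_diffop on the unsigned
   sizetype constants 3 and 5.  Since ARG1 > ARG0, the code computes
   5 - 3 = 2 in the unsigned type (where it cannot overflow), converts
   to ssizetype, and negates, yielding -2 -- a value the unsigned
   subtraction 3 - 5 could not have represented directly.  */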
1497 \f
1498
1499 /* Given T, a tree representing type conversion of ARG1, a constant,
1500 return a constant tree representing the result of the conversion. */
1501
1502 static tree
1503 fold_convert (t, arg1)
1504 tree t;
1505 tree arg1;
1506 {
1507 tree type = TREE_TYPE (t);
1508 int overflow = 0;
1509
1510 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1511 {
1512 if (TREE_CODE (arg1) == INTEGER_CST)
1513 {
1514 /* If we would build a constant wider than GCC supports,
1515 leave the conversion unfolded. */
1516 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
1517 return t;
1518
1519 /* If we are trying to make a sizetype for a small integer, use
1520 size_int to pick up cached types to reduce duplicate nodes. */
1521 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1522 && !TREE_CONSTANT_OVERFLOW (arg1)
1523 && compare_tree_int (arg1, 10000) < 0)
1524 return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);
1525
1526 /* Given an integer constant, make new constant with new type,
1527 appropriately sign-extended or truncated. */
1528 t = build_int_2 (TREE_INT_CST_LOW (arg1),
1529 TREE_INT_CST_HIGH (arg1));
1530 TREE_TYPE (t) = type;
1531 /* Indicate an overflow if (1) ARG1 already overflowed,
1532 or (2) force_fit_type indicates an overflow.
1533 Tell force_fit_type that an overflow has already occurred
1534 if ARG1 is a too-large unsigned value and T is signed.
1535 But don't indicate an overflow if converting a pointer. */
1536 TREE_OVERFLOW (t)
1537 = ((force_fit_type (t,
1538 (TREE_INT_CST_HIGH (arg1) < 0
1539 && (TREE_UNSIGNED (type)
1540 < TREE_UNSIGNED (TREE_TYPE (arg1)))))
1541 && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
1542 || TREE_OVERFLOW (arg1));
1543 TREE_CONSTANT_OVERFLOW (t)
1544 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1545 }
1546 else if (TREE_CODE (arg1) == REAL_CST)
1547 {
1548 /* Don't initialize these, use assignments.
1549 Initialized local aggregates don't work on old compilers. */
1550 REAL_VALUE_TYPE x;
1551 REAL_VALUE_TYPE l;
1552 REAL_VALUE_TYPE u;
1553 tree type1 = TREE_TYPE (arg1);
1554 int no_upper_bound;
1555
1556 x = TREE_REAL_CST (arg1);
1557 l = real_value_from_int_cst (type1, TYPE_MIN_VALUE (type));
1558
1559 no_upper_bound = (TYPE_MAX_VALUE (type) == NULL);
1560 if (!no_upper_bound)
1561 u = real_value_from_int_cst (type1, TYPE_MAX_VALUE (type));
1562
1563 /* See if X will be in range after truncation towards 0.
1564 To compensate for truncation, move the bounds away from 0,
1565 but reject if X exactly equals the adjusted bounds. */
1566 REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
1567 if (!no_upper_bound)
1568 REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
1569 /* If X is a NaN, use zero instead and show we have an overflow.
1570 Otherwise, range check. */
1571 if (REAL_VALUE_ISNAN (x))
1572 overflow = 1, x = dconst0;
1573 else if (! (REAL_VALUES_LESS (l, x)
1574 && !no_upper_bound
1575 && REAL_VALUES_LESS (x, u)))
1576 overflow = 1;
1577
1578 {
1579 HOST_WIDE_INT low, high;
1580 REAL_VALUE_TO_INT (&low, &high, x);
1581 t = build_int_2 (low, high);
1582 }
1583 TREE_TYPE (t) = type;
1584 TREE_OVERFLOW (t)
1585 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1586 TREE_CONSTANT_OVERFLOW (t)
1587 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1588 }
1589 TREE_TYPE (t) = type;
1590 }
1591 else if (TREE_CODE (type) == REAL_TYPE)
1592 {
1593 if (TREE_CODE (arg1) == INTEGER_CST)
1594 return build_real_from_int_cst (type, arg1);
1595 if (TREE_CODE (arg1) == REAL_CST)
1596 {
1597 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1598 {
1599 /* We make a copy of ARG1 so that we don't modify an
1600 existing constant tree. */
1601 t = copy_node (arg1);
1602 TREE_TYPE (t) = type;
1603 return t;
1604 }
1605
1606 t = build_real (type,
1607 real_value_truncate (TYPE_MODE (type),
1608 TREE_REAL_CST (arg1)));
1609
1610 TREE_OVERFLOW (t)
1611 = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
1612 TREE_CONSTANT_OVERFLOW (t)
1613 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1614 return t;
1615 }
1616 }
1617 TREE_CONSTANT (t) = 1;
1618 return t;
1619 }
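
/* An illustrative usage sketch (ours): folding the cast
   (unsigned char) 300.  A new INTEGER_CST is built with low = 300 and
   force_fit_type masks it to the 8-bit precision, so the result is an
   unsigned char constant with value 44 (300 mod 256); no overflow is
   recorded for a plain (non-sizetype) unsigned target.  */

static tree
example_fold_cast ()
{
  tree c = build_int_2 (300, 0);
  tree conv;

  TREE_TYPE (c) = integer_type_node;
  conv = build1 (NOP_EXPR, unsigned_char_type_node, c);
  return fold_convert (conv, c);
}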
1620 \f
1621 /* Return an expr equal to X but certainly not valid as an lvalue. */
1622
1623 tree
1624 non_lvalue (x)
1625 tree x;
1626 {
1627 tree result;
1628
1629 /* These things are certainly not lvalues. */
1630 if (TREE_CODE (x) == NON_LVALUE_EXPR
1631 || TREE_CODE (x) == INTEGER_CST
1632 || TREE_CODE (x) == REAL_CST
1633 || TREE_CODE (x) == STRING_CST
1634 || TREE_CODE (x) == ADDR_EXPR)
1635 return x;
1636
1637 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
1638 TREE_CONSTANT (result) = TREE_CONSTANT (x);
1639 return result;
1640 }
1641
1642 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
1643 Zero means allow extended lvalues. */
1644
1645 int pedantic_lvalues;
1646
1647 /* When pedantic, return an expr equal to X but certainly not valid as a
1648 pedantic lvalue. Otherwise, return X. */
1649
1650 tree
1651 pedantic_non_lvalue (x)
1652 tree x;
1653 {
1654 if (pedantic_lvalues)
1655 return non_lvalue (x);
1656 else
1657 return x;
1658 }
1659 \f
1660 /* Given a tree comparison code, return the code that is the logical inverse
1661 of the given code. It is not safe to do this for floating-point
1662 comparisons, except for NE_EXPR and EQ_EXPR. */
1663
1664 static enum tree_code
1665 invert_tree_comparison (code)
1666 enum tree_code code;
1667 {
1668 switch (code)
1669 {
1670 case EQ_EXPR:
1671 return NE_EXPR;
1672 case NE_EXPR:
1673 return EQ_EXPR;
1674 case GT_EXPR:
1675 return LE_EXPR;
1676 case GE_EXPR:
1677 return LT_EXPR;
1678 case LT_EXPR:
1679 return GE_EXPR;
1680 case LE_EXPR:
1681 return GT_EXPR;
1682 default:
1683 abort ();
1684 }
1685 }
1686
1687 /* Similar, but return the comparison that results if the operands are
1688 swapped. This is safe for floating-point. */
1689
1690 static enum tree_code
1691 swap_tree_comparison (code)
1692 enum tree_code code;
1693 {
1694 switch (code)
1695 {
1696 case EQ_EXPR:
1697 case NE_EXPR:
1698 return code;
1699 case GT_EXPR:
1700 return LT_EXPR;
1701 case GE_EXPR:
1702 return LE_EXPR;
1703 case LT_EXPR:
1704 return GT_EXPR;
1705 case LE_EXPR:
1706 return GE_EXPR;
1707 default:
1708 abort ();
1709 }
1710 }
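
/* A clarifying note (ours) on why inversion and swapping differ for
   floating point: with a NaN operand, !(a < b) is true while (a >= b)
   is false, so invert_tree_comparison is only safe for EQ_EXPR and
   NE_EXPR; but (a < b) and (b > a) agree even with NaNs, so
   swap_tree_comparison is always safe.  */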
1711
1712 /* Return nonzero if CODE is a tree code that represents a truth value. */
1713
1714 static int
1715 truth_value_p (code)
1716 enum tree_code code;
1717 {
1718 return (TREE_CODE_CLASS (code) == '<'
1719 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
1720 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
1721 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
1722 }
1723 \f
1724 /* Return nonzero if two operands are necessarily equal.
1725 If ONLY_CONST is non-zero, only return non-zero for constants.
1726 This function tests whether the operands are indistinguishable;
1727 it does not test whether they are equal using C's == operation.
1728 The distinction is important for IEEE floating point, because
1729 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
1730 (2) two NaNs may be indistinguishable, but NaN!=NaN. */
1731
1732 int
1733 operand_equal_p (arg0, arg1, only_const)
1734 tree arg0, arg1;
1735 int only_const;
1736 {
1737 /* If both types don't have the same signedness, then we can't consider
1738 them equal. We must check this before the STRIP_NOPS calls
1739 because they may change the signedness of the arguments. */
1740 if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
1741 return 0;
1742
1743 STRIP_NOPS (arg0);
1744 STRIP_NOPS (arg1);
1745
1746 if (TREE_CODE (arg0) != TREE_CODE (arg1)
1747 /* This is needed for conversions and for COMPONENT_REF.
1748 Might as well play it safe and always test this. */
1749 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
1750 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
1751 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
1752 return 0;
1753
1754 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
1755 We don't care about side effects in that case because the SAVE_EXPR
1756 takes care of that for us. In all other cases, two expressions are
1757 equal if they have no side effects. If we have two identical
1758 expressions with side effects that should be treated the same due
1759 to the only side effects being identical SAVE_EXPR's, that will
1760 be detected in the recursive calls below. */
1761 if (arg0 == arg1 && ! only_const
1762 && (TREE_CODE (arg0) == SAVE_EXPR
1763 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
1764 return 1;
1765
1766 /* Next handle constant cases, those for which we can return 1 even
1767 if ONLY_CONST is set. */
1768 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
1769 switch (TREE_CODE (arg0))
1770 {
1771 case INTEGER_CST:
1772 return (! TREE_CONSTANT_OVERFLOW (arg0)
1773 && ! TREE_CONSTANT_OVERFLOW (arg1)
1774 && tree_int_cst_equal (arg0, arg1));
1775
1776 case REAL_CST:
1777 return (! TREE_CONSTANT_OVERFLOW (arg0)
1778 && ! TREE_CONSTANT_OVERFLOW (arg1)
1779 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
1780 TREE_REAL_CST (arg1)));
1781
1782 case VECTOR_CST:
1783 {
1784 tree v1, v2;
1785
1786 if (TREE_CONSTANT_OVERFLOW (arg0)
1787 || TREE_CONSTANT_OVERFLOW (arg1))
1788 return 0;
1789
1790 v1 = TREE_VECTOR_CST_ELTS (arg0);
1791 v2 = TREE_VECTOR_CST_ELTS (arg1);
1792 while (v1 && v2)
1793 {
1794 if (!operand_equal_p (v1, v2, only_const))
1795 return 0;
1796 v1 = TREE_CHAIN (v1);
1797 v2 = TREE_CHAIN (v2);
1798 }
1799
1800 return 1;
1801 }
1802
1803 case COMPLEX_CST:
1804 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
1805 only_const)
1806 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
1807 only_const));
1808
1809 case STRING_CST:
1810 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
1811 && ! memcmp (TREE_STRING_POINTER (arg0),
1812 TREE_STRING_POINTER (arg1),
1813 TREE_STRING_LENGTH (arg0)));
1814
1815 case ADDR_EXPR:
1816 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
1817 0);
1818 default:
1819 break;
1820 }
1821
1822 if (only_const)
1823 return 0;
1824
1825 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
1826 {
1827 case '1':
1828 /* Two conversions are equal only if signedness and modes match. */
1829 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
1830 && (TREE_UNSIGNED (TREE_TYPE (arg0))
1831 != TREE_UNSIGNED (TREE_TYPE (arg1))))
1832 return 0;
1833
1834 return operand_equal_p (TREE_OPERAND (arg0, 0),
1835 TREE_OPERAND (arg1, 0), 0);
1836
1837 case '<':
1838 case '2':
1839 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
1840 && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
1841 0))
1842 return 1;
1843
1844 /* For commutative ops, allow the other order. */
1845 return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
1846 || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
1847 || TREE_CODE (arg0) == BIT_IOR_EXPR
1848 || TREE_CODE (arg0) == BIT_XOR_EXPR
1849 || TREE_CODE (arg0) == BIT_AND_EXPR
1850 || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
1851 && operand_equal_p (TREE_OPERAND (arg0, 0),
1852 TREE_OPERAND (arg1, 1), 0)
1853 && operand_equal_p (TREE_OPERAND (arg0, 1),
1854 TREE_OPERAND (arg1, 0), 0));
1855
1856 case 'r':
1857 /* If either of the pointer (or reference) expressions we are dereferencing
1858 contains a side effect, these cannot be equal. */
1859 if (TREE_SIDE_EFFECTS (arg0)
1860 || TREE_SIDE_EFFECTS (arg1))
1861 return 0;
1862
1863 switch (TREE_CODE (arg0))
1864 {
1865 case INDIRECT_REF:
1866 return operand_equal_p (TREE_OPERAND (arg0, 0),
1867 TREE_OPERAND (arg1, 0), 0);
1868
1869 case COMPONENT_REF:
1870 case ARRAY_REF:
1871 case ARRAY_RANGE_REF:
1872 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1873 TREE_OPERAND (arg1, 0), 0)
1874 && operand_equal_p (TREE_OPERAND (arg0, 1),
1875 TREE_OPERAND (arg1, 1), 0));
1876
1877 case BIT_FIELD_REF:
1878 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1879 TREE_OPERAND (arg1, 0), 0)
1880 && operand_equal_p (TREE_OPERAND (arg0, 1),
1881 TREE_OPERAND (arg1, 1), 0)
1882 && operand_equal_p (TREE_OPERAND (arg0, 2),
1883 TREE_OPERAND (arg1, 2), 0));
1884 default:
1885 return 0;
1886 }
1887
1888 case 'e':
1889 if (TREE_CODE (arg0) == RTL_EXPR)
1890 return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
1891 return 0;
1892
1893 default:
1894 return 0;
1895 }
1896 }
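/* Two illustrative calls, assuming A and B are side-effect-free trees
   of the same type (the names are hypothetical):

   operand_equal_p (A + B, B + A, 0) returns 1, since PLUS_EXPR is in
   the commutative list above, while operand_equal_p (A - B, B - A, 0)
   returns 0.  With ONLY_CONST nonzero, only the constant cases can
   succeed, so identical non-constant expressions compare unequal.  */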
1897 \f
1898 /* Similar to operand_equal_p, but see if ARG0 might have been made by
1899 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
1900
1901 When in doubt, return 0. */
1902
1903 static int
1904 operand_equal_for_comparison_p (arg0, arg1, other)
1905 tree arg0, arg1;
1906 tree other;
1907 {
1908 int unsignedp1, unsignedpo;
1909 tree primarg0, primarg1, primother;
1910 unsigned int correct_width;
1911
1912 if (operand_equal_p (arg0, arg1, 0))
1913 return 1;
1914
1915 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
1916 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
1917 return 0;
1918
1919 /* Discard any conversions that don't change the modes of ARG0 and ARG1
1920 and see if the inner values are the same. This removes any
1921 signedness comparison, which doesn't matter here. */
1922 primarg0 = arg0, primarg1 = arg1;
1923 STRIP_NOPS (primarg0);
1924 STRIP_NOPS (primarg1);
1925 if (operand_equal_p (primarg0, primarg1, 0))
1926 return 1;
1927
1928 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
1929 actual comparison operand, ARG0.
1930
1931 First throw away any conversions to wider types
1932 already present in the operands. */
1933
1934 primarg1 = get_narrower (arg1, &unsignedp1);
1935 primother = get_narrower (other, &unsignedpo);
1936
1937 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
1938 if (unsignedp1 == unsignedpo
1939 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
1940 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
1941 {
1942 tree type = TREE_TYPE (arg0);
1943
1944 /* Make sure shorter operand is extended the right way
1945 to match the longer operand. */
1946 primarg1 = convert ((*lang_hooks.types.signed_or_unsigned_type)
1947 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
1948
1949 if (operand_equal_p (arg0, convert (type, primarg1), 0))
1950 return 1;
1951 }
1952
1953 return 0;
1954 }
1955 \f
1956 /* See if ARG is an expression that is either a comparison or is performing
1957 arithmetic on comparisons. The comparisons must only be comparing
1958 two different values, which will be stored in *CVAL1 and *CVAL2; if
1959 they are non-zero it means that some operands have already been found.
1960 No variables may be used anywhere else in the expression except in the
1961 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
1962 the expression and save_expr needs to be called with CVAL1 and CVAL2.
1963
1964 If this is true, return 1. Otherwise, return zero. */
1965
1966 static int
1967 twoval_comparison_p (arg, cval1, cval2, save_p)
1968 tree arg;
1969 tree *cval1, *cval2;
1970 int *save_p;
1971 {
1972 enum tree_code code = TREE_CODE (arg);
1973 char class = TREE_CODE_CLASS (code);
1974
1975 /* We can handle some of the 'e' cases here. */
1976 if (class == 'e' && code == TRUTH_NOT_EXPR)
1977 class = '1';
1978 else if (class == 'e'
1979 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
1980 || code == COMPOUND_EXPR))
1981 class = '2';
1982
1983 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
1984 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
1985 {
1986 /* If we've already found a CVAL1 or CVAL2, this expression is
1987 too complex to handle. */
1988 if (*cval1 || *cval2)
1989 return 0;
1990
1991 class = '1';
1992 *save_p = 1;
1993 }
1994
1995 switch (class)
1996 {
1997 case '1':
1998 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
1999
2000 case '2':
2001 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2002 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2003 cval1, cval2, save_p));
2004
2005 case 'c':
2006 return 1;
2007
2008 case 'e':
2009 if (code == COND_EXPR)
2010 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2011 cval1, cval2, save_p)
2012 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2013 cval1, cval2, save_p)
2014 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2015 cval1, cval2, save_p));
2016 return 0;
2017
2018 case '<':
2019 /* First see if we can handle the first operand, then the second. For
2020 the second operand, we know *CVAL1 can't be zero. Each of the two
2021 values must appear on one side of the comparison; test for the
2022 case where this isn't true by failing if the two operands
2023 are the same. */
2024
2025 if (operand_equal_p (TREE_OPERAND (arg, 0),
2026 TREE_OPERAND (arg, 1), 0))
2027 return 0;
2028
2029 if (*cval1 == 0)
2030 *cval1 = TREE_OPERAND (arg, 0);
2031 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2032 ;
2033 else if (*cval2 == 0)
2034 *cval2 = TREE_OPERAND (arg, 0);
2035 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2036 ;
2037 else
2038 return 0;
2039
2040 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2041 ;
2042 else if (*cval2 == 0)
2043 *cval2 = TREE_OPERAND (arg, 1);
2044 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2045 ;
2046 else
2047 return 0;
2048
2049 return 1;
2050
2051 default:
2052 return 0;
2053 }
2054 }
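/* An illustrative sketch, with A, B and C standing for hypothetical
   variables: for ARG = (A < B) && (A == B) the walk stores *CVAL1 = A
   and *CVAL2 = B and returns 1, while ARG = (A < B) && (A == C) fails
   because the third value C appears in a comparison.  */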
2055 \f
2056 /* ARG is a tree that is known to contain just arithmetic operations and
2057 comparisons. Evaluate the operations in the tree substituting NEW0 for
2058 any occurrence of OLD0 as an operand of a comparison and likewise for
2059 NEW1 and OLD1. */
2060
2061 static tree
2062 eval_subst (arg, old0, new0, old1, new1)
2063 tree arg;
2064 tree old0, new0, old1, new1;
2065 {
2066 tree type = TREE_TYPE (arg);
2067 enum tree_code code = TREE_CODE (arg);
2068 char class = TREE_CODE_CLASS (code);
2069
2070 /* We can handle some of the 'e' cases here. */
2071 if (class == 'e' && code == TRUTH_NOT_EXPR)
2072 class = '1';
2073 else if (class == 'e'
2074 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2075 class = '2';
2076
2077 switch (class)
2078 {
2079 case '1':
2080 return fold (build1 (code, type,
2081 eval_subst (TREE_OPERAND (arg, 0),
2082 old0, new0, old1, new1)));
2083
2084 case '2':
2085 return fold (build (code, type,
2086 eval_subst (TREE_OPERAND (arg, 0),
2087 old0, new0, old1, new1),
2088 eval_subst (TREE_OPERAND (arg, 1),
2089 old0, new0, old1, new1)));
2090
2091 case 'e':
2092 switch (code)
2093 {
2094 case SAVE_EXPR:
2095 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2096
2097 case COMPOUND_EXPR:
2098 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2099
2100 case COND_EXPR:
2101 return fold (build (code, type,
2102 eval_subst (TREE_OPERAND (arg, 0),
2103 old0, new0, old1, new1),
2104 eval_subst (TREE_OPERAND (arg, 1),
2105 old0, new0, old1, new1),
2106 eval_subst (TREE_OPERAND (arg, 2),
2107 old0, new0, old1, new1)));
2108 default:
2109 break;
2110 }
2111 /* fall through - ??? */
2112
2113 case '<':
2114 {
2115 tree arg0 = TREE_OPERAND (arg, 0);
2116 tree arg1 = TREE_OPERAND (arg, 1);
2117
2118 /* We need to check both for exact equality and tree equality. The
2119 former will be true if the operand has a side-effect. In that
2120 case, we know the operand occurred exactly once. */
2121
2122 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2123 arg0 = new0;
2124 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2125 arg0 = new1;
2126
2127 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2128 arg1 = new0;
2129 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2130 arg1 = new1;
2131
2132 return fold (build (code, type, arg0, arg1));
2133 }
2134
2135 default:
2136 return arg;
2137 }
2138 }
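/* For example (operand names are hypothetical): with
   ARG = (A < B) || (A == B), OLD0 = A, NEW0 = X, OLD1 = B and
   NEW1 = Y, eval_subst builds and folds (X < Y) || (X == Y).  Only
   comparison operands are substituted; the surrounding arithmetic is
   rebuilt unchanged.  */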
2139 \f
2140 /* Return a tree for the case when the result of an expression is RESULT
2141 converted to TYPE and OMITTED was previously an operand of the expression
2142 but is now not needed (e.g., we folded OMITTED * 0).
2143
2144 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2145 the conversion of RESULT to TYPE. */
2146
2147 static tree
2148 omit_one_operand (type, result, omitted)
2149 tree type, result, omitted;
2150 {
2151 tree t = convert (type, result);
2152
2153 if (TREE_SIDE_EFFECTS (omitted))
2154 return build (COMPOUND_EXPR, type, omitted, t);
2155
2156 return non_lvalue (t);
2157 }
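/* For example, when fold simplifies CALL * 0, where CALL is some call
   with side effects (the name is illustrative),
   omit_one_operand (type, integer_zero_node, CALL) yields the
   COMPOUND_EXPR (CALL, 0), so the call is still evaluated; for a
   plain variable the result is just the converted constant.  */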
2158
2159 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2160
2161 static tree
2162 pedantic_omit_one_operand (type, result, omitted)
2163 tree type, result, omitted;
2164 {
2165 tree t = convert (type, result);
2166
2167 if (TREE_SIDE_EFFECTS (omitted))
2168 return build (COMPOUND_EXPR, type, omitted, t);
2169
2170 return pedantic_non_lvalue (t);
2171 }
2172 \f
2173 /* Return a simplified tree node for the truth-negation of ARG. This
2174 never alters ARG itself. We assume that ARG is an operation that
2175 returns a truth value (0 or 1). */
2176
2177 tree
2178 invert_truthvalue (arg)
2179 tree arg;
2180 {
2181 tree type = TREE_TYPE (arg);
2182 enum tree_code code = TREE_CODE (arg);
2183
2184 if (code == ERROR_MARK)
2185 return arg;
2186
2187 /* If this is a comparison, we can simply invert it, except for
2188 floating-point non-equality comparisons, in which case we just
2189 enclose a TRUTH_NOT_EXPR around what we have. */
2190
2191 if (TREE_CODE_CLASS (code) == '<')
2192 {
2193 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2194 && !flag_unsafe_math_optimizations
2195 && code != NE_EXPR
2196 && code != EQ_EXPR)
2197 return build1 (TRUTH_NOT_EXPR, type, arg);
2198 else
2199 return build (invert_tree_comparison (code), type,
2200 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2201 }
2202
2203 switch (code)
2204 {
2205 case INTEGER_CST:
2206 return convert (type, build_int_2 (integer_zerop (arg), 0));
2207
2208 case TRUTH_AND_EXPR:
2209 return build (TRUTH_OR_EXPR, type,
2210 invert_truthvalue (TREE_OPERAND (arg, 0)),
2211 invert_truthvalue (TREE_OPERAND (arg, 1)));
2212
2213 case TRUTH_OR_EXPR:
2214 return build (TRUTH_AND_EXPR, type,
2215 invert_truthvalue (TREE_OPERAND (arg, 0)),
2216 invert_truthvalue (TREE_OPERAND (arg, 1)));
2217
2218 case TRUTH_XOR_EXPR:
2219 /* Here we can invert either operand. We invert the first operand
2220 unless the second operand is a TRUTH_NOT_EXPR in which case our
2221 result is the XOR of the first operand with the inside of the
2222 negation of the second operand. */
2223
2224 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2225 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2226 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2227 else
2228 return build (TRUTH_XOR_EXPR, type,
2229 invert_truthvalue (TREE_OPERAND (arg, 0)),
2230 TREE_OPERAND (arg, 1));
2231
2232 case TRUTH_ANDIF_EXPR:
2233 return build (TRUTH_ORIF_EXPR, type,
2234 invert_truthvalue (TREE_OPERAND (arg, 0)),
2235 invert_truthvalue (TREE_OPERAND (arg, 1)));
2236
2237 case TRUTH_ORIF_EXPR:
2238 return build (TRUTH_ANDIF_EXPR, type,
2239 invert_truthvalue (TREE_OPERAND (arg, 0)),
2240 invert_truthvalue (TREE_OPERAND (arg, 1)));
2241
2242 case TRUTH_NOT_EXPR:
2243 return TREE_OPERAND (arg, 0);
2244
2245 case COND_EXPR:
2246 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2247 invert_truthvalue (TREE_OPERAND (arg, 1)),
2248 invert_truthvalue (TREE_OPERAND (arg, 2)));
2249
2250 case COMPOUND_EXPR:
2251 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2252 invert_truthvalue (TREE_OPERAND (arg, 1)));
2253
2254 case WITH_RECORD_EXPR:
2255 return build (WITH_RECORD_EXPR, type,
2256 invert_truthvalue (TREE_OPERAND (arg, 0)),
2257 TREE_OPERAND (arg, 1));
2258
2259 case NON_LVALUE_EXPR:
2260 return invert_truthvalue (TREE_OPERAND (arg, 0));
2261
2262 case NOP_EXPR:
2263 case CONVERT_EXPR:
2264 case FLOAT_EXPR:
2265 return build1 (TREE_CODE (arg), type,
2266 invert_truthvalue (TREE_OPERAND (arg, 0)));
2267
2268 case BIT_AND_EXPR:
2269 if (!integer_onep (TREE_OPERAND (arg, 1)))
2270 break;
2271 return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));
2272
2273 case SAVE_EXPR:
2274 return build1 (TRUTH_NOT_EXPR, type, arg);
2275
2276 case CLEANUP_POINT_EXPR:
2277 return build1 (CLEANUP_POINT_EXPR, type,
2278 invert_truthvalue (TREE_OPERAND (arg, 0)));
2279
2280 default:
2281 break;
2282 }
2283 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2284 abort ();
2285 return build1 (TRUTH_NOT_EXPR, type, arg);
2286 }
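/* A few sample inversions, with A, B and C as hypothetical
   truth-valued operands:

   !(A < B)      =>  A >= B   (integral operands)
   !(A && B)     =>  !A || !B
   !(A ? B : C)  =>  A ? !B : !C

   For a floating-point A < B without -funsafe-math-optimizations the
   result stays a TRUTH_NOT_EXPR, since A >= B gives a different
   answer when a NaN is involved.  */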
2287
2288 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2289 operands are another bit-wise operation with a common input. If so,
2290 distribute the bit operations to save an operation and possibly two if
2291 constants are involved. For example, convert
2292 (A | B) & (A | C) into A | (B & C)
2293 Further simplification will occur if B and C are constants.
2294
2295 If this optimization cannot be done, 0 will be returned. */
2296
2297 static tree
2298 distribute_bit_expr (code, type, arg0, arg1)
2299 enum tree_code code;
2300 tree type;
2301 tree arg0, arg1;
2302 {
2303 tree common;
2304 tree left, right;
2305
2306 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2307 || TREE_CODE (arg0) == code
2308 || (TREE_CODE (arg0) != BIT_AND_EXPR
2309 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2310 return 0;
2311
2312 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2313 {
2314 common = TREE_OPERAND (arg0, 0);
2315 left = TREE_OPERAND (arg0, 1);
2316 right = TREE_OPERAND (arg1, 1);
2317 }
2318 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2319 {
2320 common = TREE_OPERAND (arg0, 0);
2321 left = TREE_OPERAND (arg0, 1);
2322 right = TREE_OPERAND (arg1, 0);
2323 }
2324 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2325 {
2326 common = TREE_OPERAND (arg0, 1);
2327 left = TREE_OPERAND (arg0, 0);
2328 right = TREE_OPERAND (arg1, 1);
2329 }
2330 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2331 {
2332 common = TREE_OPERAND (arg0, 1);
2333 left = TREE_OPERAND (arg0, 0);
2334 right = TREE_OPERAND (arg1, 0);
2335 }
2336 else
2337 return 0;
2338
2339 return fold (build (TREE_CODE (arg0), type, common,
2340 fold (build (code, type, left, right))));
2341 }
2342 \f
2343 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2344 starting at BITPOS. The field is unsigned if UNSIGNEDP is non-zero. */
2345
2346 static tree
2347 make_bit_field_ref (inner, type, bitsize, bitpos, unsignedp)
2348 tree inner;
2349 tree type;
2350 int bitsize, bitpos;
2351 int unsignedp;
2352 {
2353 tree result = build (BIT_FIELD_REF, type, inner,
2354 size_int (bitsize), bitsize_int (bitpos));
2355
2356 TREE_UNSIGNED (result) = unsignedp;
2357
2358 return result;
2359 }
2360
2361 /* Optimize a bit-field compare.
2362
2363 There are two cases: First is a compare against a constant and the
2364 second is a comparison of two items where the fields are at the same
2365 bit position relative to the start of a chunk (byte, halfword, word)
2366 large enough to contain it. In these cases we can avoid the shift
2367 implicit in bitfield extractions.
2368
2369 For constants, we emit a compare of the shifted constant with the
2370 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2371 compared. For two fields at the same position, we do the ANDs with the
2372 similar mask and compare the result of the ANDs.
2373
2374 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2375 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2376 are the left and right operands of the comparison, respectively.
2377
2378 If the optimization described above can be done, we return the resulting
2379 tree. Otherwise we return zero. */
2380
2381 static tree
2382 optimize_bit_field_compare (code, compare_type, lhs, rhs)
2383 enum tree_code code;
2384 tree compare_type;
2385 tree lhs, rhs;
2386 {
2387 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
2388 tree type = TREE_TYPE (lhs);
2389 tree signed_type, unsigned_type;
2390 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2391 enum machine_mode lmode, rmode, nmode;
2392 int lunsignedp, runsignedp;
2393 int lvolatilep = 0, rvolatilep = 0;
2394 tree linner, rinner = NULL_TREE;
2395 tree mask;
2396 tree offset;
2397
2398 /* Get all the information about the extractions being done. If the bit size
2399 is the same as the size of the underlying object, we aren't doing an
2400 extraction at all and so can do nothing. We also don't want to
2401 do anything if the inner expression is a PLACEHOLDER_EXPR since we
2402 then will no longer be able to replace it. */
2403 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2404 &lunsignedp, &lvolatilep);
2405 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2406 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
2407 return 0;
2408
2409 if (!const_p)
2410 {
2411 /* If this is not a constant, we can only do something if bit positions,
2412 sizes, and signedness are the same. */
2413 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
2414 &runsignedp, &rvolatilep);
2415
2416 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2417 || lunsignedp != runsignedp || offset != 0
2418 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
2419 return 0;
2420 }
2421
2422 /* See if we can find a mode to refer to this field. We should be able to,
2423 but fail if we can't. */
2424 nmode = get_best_mode (lbitsize, lbitpos,
2425 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
2426 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
2427 TYPE_ALIGN (TREE_TYPE (rinner))),
2428 word_mode, lvolatilep || rvolatilep);
2429 if (nmode == VOIDmode)
2430 return 0;
2431
2432 /* Set signed and unsigned types of the precision of this mode for the
2433 shifts below. */
2434 signed_type = (*lang_hooks.types.type_for_mode) (nmode, 0);
2435 unsigned_type = (*lang_hooks.types.type_for_mode) (nmode, 1);
2436
2437 /* Compute the bit position and size for the new reference and our offset
2438 within it. If the new reference is the same size as the original, we
2439 won't optimize anything, so return zero. */
2440 nbitsize = GET_MODE_BITSIZE (nmode);
2441 nbitpos = lbitpos & ~ (nbitsize - 1);
2442 lbitpos -= nbitpos;
2443 if (nbitsize == lbitsize)
2444 return 0;
2445
2446 if (BYTES_BIG_ENDIAN)
2447 lbitpos = nbitsize - lbitsize - lbitpos;
2448
2449 /* Make the mask to be used against the extracted field. */
2450 mask = build_int_2 (~0, ~0);
2451 TREE_TYPE (mask) = unsigned_type;
2452 force_fit_type (mask, 0);
2453 mask = convert (unsigned_type, mask);
2454 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
2455 mask = const_binop (RSHIFT_EXPR, mask,
2456 size_int (nbitsize - lbitsize - lbitpos), 0);
2457
2458 if (! const_p)
2459 /* If not comparing with constant, just rework the comparison
2460 and return. */
2461 return build (code, compare_type,
2462 build (BIT_AND_EXPR, unsigned_type,
2463 make_bit_field_ref (linner, unsigned_type,
2464 nbitsize, nbitpos, 1),
2465 mask),
2466 build (BIT_AND_EXPR, unsigned_type,
2467 make_bit_field_ref (rinner, unsigned_type,
2468 nbitsize, nbitpos, 1),
2469 mask));
2470
2471 /* Otherwise, we are handling the constant case. See if the constant is too
2472 big for the field. Warn and return a tree for 0 (false) if so. We do
2473 this not only for its own sake, but to avoid having to test for this
2474 error case below. If we didn't, we might generate wrong code.
2475
2476 For unsigned fields, the constant shifted right by the field length should
2477 be all zero. For signed fields, the high-order bits should agree with
2478 the sign bit. */
2479
2480 if (lunsignedp)
2481 {
2482 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2483 convert (unsigned_type, rhs),
2484 size_int (lbitsize), 0)))
2485 {
2486 warning ("comparison is always %d due to width of bit-field",
2487 code == NE_EXPR);
2488 return convert (compare_type,
2489 (code == NE_EXPR
2490 ? integer_one_node : integer_zero_node));
2491 }
2492 }
2493 else
2494 {
2495 tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
2496 size_int (lbitsize - 1), 0);
2497 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2498 {
2499 warning ("comparison is always %d due to width of bit-field",
2500 code == NE_EXPR);
2501 return convert (compare_type,
2502 (code == NE_EXPR
2503 ? integer_one_node : integer_zero_node));
2504 }
2505 }
2506
2507 /* Single-bit compares should always be against zero. */
2508 if (lbitsize == 1 && ! integer_zerop (rhs))
2509 {
2510 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2511 rhs = convert (type, integer_zero_node);
2512 }
2513
2514 /* Make a new bitfield reference, shift the constant over the
2515 appropriate number of bits and mask it with the computed mask
2516 (in case this was a signed field). If we changed it, make a new one. */
2517 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
2518 if (lvolatilep)
2519 {
2520 TREE_SIDE_EFFECTS (lhs) = 1;
2521 TREE_THIS_VOLATILE (lhs) = 1;
2522 }
2523
2524 rhs = fold (const_binop (BIT_AND_EXPR,
2525 const_binop (LSHIFT_EXPR,
2526 convert (unsigned_type, rhs),
2527 size_int (lbitpos), 0),
2528 mask, 0));
2529
2530 return build (code, compare_type,
2531 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
2532 rhs);
2533 }
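/* A rough worked example, assuming BYTES_BIG_ENDIAN is clear and a
   hypothetical 3-bit unsigned field at bit 2 of an 8-bit unit: here
   nbitsize is 8, the mask is (~0 << 5) >> 3 == 0x1c, and comparing
   the field against the constant 5 becomes

   (unit & 0x1c) == (5 << 2), i.e. (unit & 0x1c) == 0x14

   one AND and one comparison, with no shift at run time.  */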
2534 \f
2535 /* Subroutine for fold_truthop: decode a field reference.
2536
2537 If EXP is a comparison reference, we return the innermost reference.
2538
2539 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2540 set to the starting bit number.
2541
2542 If the innermost field can be completely contained in a mode-sized
2543 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
2544
2545 *PVOLATILEP is set to 1 if any expression encountered is volatile;
2546 otherwise it is not changed.
2547
2548 *PUNSIGNEDP is set to the signedness of the field.
2549
2550 *PMASK is set to the mask used. This is either contained in a
2551 BIT_AND_EXPR or derived from the width of the field.
2552
2553 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
2554
2555 Return 0 if this is not a component reference or is one that we can't
2556 do anything with. */
2557
2558 static tree
2559 decode_field_reference (exp, pbitsize, pbitpos, pmode, punsignedp,
2560 pvolatilep, pmask, pand_mask)
2561 tree exp;
2562 HOST_WIDE_INT *pbitsize, *pbitpos;
2563 enum machine_mode *pmode;
2564 int *punsignedp, *pvolatilep;
2565 tree *pmask;
2566 tree *pand_mask;
2567 {
2568 tree and_mask = 0;
2569 tree mask, inner, offset;
2570 tree unsigned_type;
2571 unsigned int precision;
2572
2573 /* All the optimizations using this function assume integer fields.
2574 There are problems with FP fields since the type_for_size call
2575 below can fail for, e.g., XFmode. */
2576 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
2577 return 0;
2578
2579 STRIP_NOPS (exp);
2580
2581 if (TREE_CODE (exp) == BIT_AND_EXPR)
2582 {
2583 and_mask = TREE_OPERAND (exp, 1);
2584 exp = TREE_OPERAND (exp, 0);
2585 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
2586 if (TREE_CODE (and_mask) != INTEGER_CST)
2587 return 0;
2588 }
2589
2590 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
2591 punsignedp, pvolatilep);
2592 if ((inner == exp && and_mask == 0)
2593 || *pbitsize < 0 || offset != 0
2594 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
2595 return 0;
2596
2597 /* Compute the mask to access the bitfield. */
2598 unsigned_type = (*lang_hooks.types.type_for_size) (*pbitsize, 1);
2599 precision = TYPE_PRECISION (unsigned_type);
2600
2601 mask = build_int_2 (~0, ~0);
2602 TREE_TYPE (mask) = unsigned_type;
2603 force_fit_type (mask, 0);
2604 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2605 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2606
2607 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
2608 if (and_mask != 0)
2609 mask = fold (build (BIT_AND_EXPR, unsigned_type,
2610 convert (unsigned_type, and_mask), mask));
2611
2612 *pmask = mask;
2613 *pand_mask = and_mask;
2614 return inner;
2615 }
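/* The mask computation above is a shift trick: for a hypothetical
   *PBITSIZE of 3, type_for_size picks an 8-bit unsigned type, so
   PRECISION - *PBITSIZE is 5 and mask = (~0 << 5) >> 5 == 0x07,
   i.e. three low-order ones, subsequently ANDed with any mask taken
   from an enclosing BIT_AND_EXPR.  */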
2616
2617 /* Return non-zero if MASK represents a mask of SIZE ones in the low-order
2618 bit positions. */
2619
2620 static int
2621 all_ones_mask_p (mask, size)
2622 tree mask;
2623 int size;
2624 {
2625 tree type = TREE_TYPE (mask);
2626 unsigned int precision = TYPE_PRECISION (type);
2627 tree tmask;
2628
2629 tmask = build_int_2 (~0, ~0);
2630 TREE_TYPE (tmask) = (*lang_hooks.types.signed_type) (type);
2631 force_fit_type (tmask, 0);
2632 return
2633 tree_int_cst_equal (mask,
2634 const_binop (RSHIFT_EXPR,
2635 const_binop (LSHIFT_EXPR, tmask,
2636 size_int (precision - size),
2637 0),
2638 size_int (precision - size), 0));
2639 }
2640
2641 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
2642 represents the sign bit of EXP's type. If EXP represents a sign
2643 or zero extension, also test VAL against the unextended type.
2644 The return value is the (sub)expression whose sign bit is VAL,
2645 or NULL_TREE otherwise. */
2646
2647 static tree
2648 sign_bit_p (exp, val)
2649 tree exp;
2650 tree val;
2651 {
2652 unsigned HOST_WIDE_INT lo;
2653 HOST_WIDE_INT hi;
2654 int width;
2655 tree t;
2656
2657 /* Tree EXP must have an integral type. */
2658 t = TREE_TYPE (exp);
2659 if (! INTEGRAL_TYPE_P (t))
2660 return NULL_TREE;
2661
2662 /* Tree VAL must be an integer constant. */
2663 if (TREE_CODE (val) != INTEGER_CST
2664 || TREE_CONSTANT_OVERFLOW (val))
2665 return NULL_TREE;
2666
2667 width = TYPE_PRECISION (t);
2668 if (width > HOST_BITS_PER_WIDE_INT)
2669 {
2670 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
2671 lo = 0;
2672 }
2673 else
2674 {
2675 hi = 0;
2676 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
2677 }
2678
2679 if (TREE_INT_CST_HIGH (val) == hi && TREE_INT_CST_LOW (val) == lo)
2680 return exp;
2681
2682 /* Handle extension from a narrower type. */
2683 if (TREE_CODE (exp) == NOP_EXPR
2684 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
2685 return sign_bit_p (TREE_OPERAND (exp, 0), val);
2686
2687 return NULL_TREE;
2688 }
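/* For instance (the widths here are illustrative): if EXP has a
   32-bit type, VAL must be 1 << 31 for EXP itself to be returned;
   if EXP is a NOP_EXPR widening an 8-bit operand, the recursive call
   also accepts VAL == 0x80, the sign bit of the unextended type.  */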
2689
2690 /* Subroutine for fold_truthop: determine if an operand is simple enough
2691 to be evaluated unconditionally. */
2692
2693 static int
2694 simple_operand_p (exp)
2695 tree exp;
2696 {
2697 /* Strip any conversions that don't change the machine mode. */
2698 while ((TREE_CODE (exp) == NOP_EXPR
2699 || TREE_CODE (exp) == CONVERT_EXPR)
2700 && (TYPE_MODE (TREE_TYPE (exp))
2701 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
2702 exp = TREE_OPERAND (exp, 0);
2703
2704 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
2705 || (DECL_P (exp)
2706 && ! TREE_ADDRESSABLE (exp)
2707 && ! TREE_THIS_VOLATILE (exp)
2708 && ! DECL_NONLOCAL (exp)
2709 /* Don't regard global variables as simple. They may be
2710 allocated in ways unknown to the compiler (shared memory,
2711 #pragma weak, etc). */
2712 && ! TREE_PUBLIC (exp)
2713 && ! DECL_EXTERNAL (exp)
2714 /* Loading a static variable is unduly expensive, but global
2715 registers aren't expensive. */
2716 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
2717 }
2718 \f
2719 /* The following functions are subroutines to fold_range_test and allow it to
2720 try to change a logical combination of comparisons into a range test.
2721
2722 For example, both
2723 X == 2 || X == 3 || X == 4 || X == 5
2724 and
2725 X >= 2 && X <= 5
2726 are converted to
2727 (unsigned) (X - 2) <= 3
2728
2729 We describe each set of comparisons as being either inside or outside
2730 a range, using a variable named like IN_P, and then describe the
2731 range with a lower and upper bound. If one of the bounds is omitted,
2732 it represents either the highest or lowest value of the type.
2733
2734 In the comments below, we represent a range by two numbers in brackets
2735 preceded by a "+" to designate being inside that range, or a "-" to
2736 designate being outside that range, so the condition can be inverted by
2737 flipping the prefix. An omitted bound is represented by a "-". For
2738 example, "- [-, 10]" means being outside the range starting at the lowest
2739 possible value and ending at 10, in other words, being greater than 10.
2740 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
2741 always false.
2742
2743 We set up things so that the missing bounds are handled in a consistent
2744 manner so neither a missing bound nor "true" and "false" need to be
2745 handled using a special case. */
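/* The single unsigned comparison comes from the usual trick: for an
   integral X, LOW <= X && X <= HIGH is equivalent to

   (unsigned) (X - LOW) <= (unsigned) (HIGH - LOW)

   since subtracting LOW makes any value below LOW wrap around to a
   large unsigned number.  build_range_check below performs this
   transformation explicitly.  */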
2746
2747 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
2748 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
2749 and UPPER1_P are nonzero if the respective argument is an upper bound
2750 and zero for a lower. TYPE, if nonzero, is the type of the result; it
2751 must be specified for a comparison. ARG1 will be converted to ARG0's
2752 type if both are specified. */
2753
2754 static tree
2755 range_binop (code, type, arg0, upper0_p, arg1, upper1_p)
2756 enum tree_code code;
2757 tree type;
2758 tree arg0, arg1;
2759 int upper0_p, upper1_p;
2760 {
2761 tree tem;
2762 int result;
2763 int sgn0, sgn1;
2764
2765 /* If neither arg represents infinity, do the normal operation.
2766 Else, if not a comparison, return infinity. Else handle the special
2767 comparison rules. Note that most of the cases below won't occur, but
2768 are handled for consistency. */
2769
2770 if (arg0 != 0 && arg1 != 0)
2771 {
2772 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
2773 arg0, convert (TREE_TYPE (arg0), arg1)));
2774 STRIP_NOPS (tem);
2775 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
2776 }
2777
2778 if (TREE_CODE_CLASS (code) != '<')
2779 return 0;
2780
2781 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
2782 for neither. In real mathematics, we cannot assume open-ended ranges
2783 are the same. But this is computer arithmetic, where numbers are finite.
2784 We can therefore stand in for any missing bound with a value Z that is
2785 greater than any representable number, which permits us to treat
2786 unbounded ranges as equal. */
2787 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
2788 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
2789 switch (code)
2790 {
2791 case EQ_EXPR:
2792 result = sgn0 == sgn1;
2793 break;
2794 case NE_EXPR:
2795 result = sgn0 != sgn1;
2796 break;
2797 case LT_EXPR:
2798 result = sgn0 < sgn1;
2799 break;
2800 case LE_EXPR:
2801 result = sgn0 <= sgn1;
2802 break;
2803 case GT_EXPR:
2804 result = sgn0 > sgn1;
2805 break;
2806 case GE_EXPR:
2807 result = sgn0 >= sgn1;
2808 break;
2809 default:
2810 abort ();
2811 }
2812
2813 return convert (type, result ? integer_one_node : integer_zero_node);
2814 }
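/* E.g., with hypothetical bounds: comparing a missing lower bound
   against the constant 5 with LT_EXPR gives SGN0 = -1 and SGN1 = 0,
   hence "true"; minus infinity is below every finite bound, and two
   missing lower bounds compare equal.  */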
2815 \f
2816 /* Given EXP, a logical expression, set the range it is testing into
2817 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
2818 actually being tested. *PLOW and *PHIGH will be made of the same type
2819 as the returned expression. If EXP is not a comparison, we will most
2820 likely not be returning a useful value and range. */
2821
2822 static tree
2823 make_range (exp, pin_p, plow, phigh)
2824 tree exp;
2825 int *pin_p;
2826 tree *plow, *phigh;
2827 {
2828 enum tree_code code;
2829 tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
2830 tree orig_type = NULL_TREE;
2831 int in_p, n_in_p;
2832 tree low, high, n_low, n_high;
2833
2834 /* Start with simply saying "EXP != 0" and then look at the code of EXP
2835 and see if we can refine the range. Some of the cases below may not
2836 happen, but it doesn't seem worth worrying about this. We "continue"
2837 the outer loop when we've changed something; otherwise we "break"
2838 the switch, which will "break" the while. */
2839
2840 in_p = 0, low = high = convert (TREE_TYPE (exp), integer_zero_node);
2841
2842 while (1)
2843 {
2844 code = TREE_CODE (exp);
2845
2846 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
2847 {
2848 arg0 = TREE_OPERAND (exp, 0);
2849 if (TREE_CODE_CLASS (code) == '<'
2850 || TREE_CODE_CLASS (code) == '1'
2851 || TREE_CODE_CLASS (code) == '2')
2852 type = TREE_TYPE (arg0);
2853 if (TREE_CODE_CLASS (code) == '2'
2854 || TREE_CODE_CLASS (code) == '<'
2855 || (TREE_CODE_CLASS (code) == 'e'
2856 && TREE_CODE_LENGTH (code) > 1))
2857 arg1 = TREE_OPERAND (exp, 1);
2858 }
2859
2860 /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
2861 lose a cast by accident. */
2862 if (type != NULL_TREE && orig_type == NULL_TREE)
2863 orig_type = type;
2864
2865 switch (code)
2866 {
2867 case TRUTH_NOT_EXPR:
2868 in_p = ! in_p, exp = arg0;
2869 continue;
2870
2871 case EQ_EXPR: case NE_EXPR:
2872 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
2873 /* We can only do something if the range is testing for zero
2874 and if the second operand is an integer constant. Note that
2875 saying something is "in" the range we make is done by
2876 complementing IN_P since it will set in the initial case of
2877 being not equal to zero; "out" is leaving it alone. */
2878 if (low == 0 || high == 0
2879 || ! integer_zerop (low) || ! integer_zerop (high)
2880 || TREE_CODE (arg1) != INTEGER_CST)
2881 break;
2882
2883 switch (code)
2884 {
2885 case NE_EXPR: /* - [c, c] */
2886 low = high = arg1;
2887 break;
2888 case EQ_EXPR: /* + [c, c] */
2889 in_p = ! in_p, low = high = arg1;
2890 break;
2891 case GT_EXPR: /* - [-, c] */
2892 low = 0, high = arg1;
2893 break;
2894 case GE_EXPR: /* + [c, -] */
2895 in_p = ! in_p, low = arg1, high = 0;
2896 break;
2897 case LT_EXPR: /* - [c, -] */
2898 low = arg1, high = 0;
2899 break;
2900 case LE_EXPR: /* + [-, c] */
2901 in_p = ! in_p, low = 0, high = arg1;
2902 break;
2903 default:
2904 abort ();
2905 }
2906
2907 exp = arg0;
2908
2909 /* If this is an unsigned comparison, we also know that EXP is
2910 greater than or equal to zero. We base the range tests we make
2911 on that fact, so we record it here so we can parse existing
2912 range tests. */
2913 if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
2914 {
2915 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
2916 1, convert (type, integer_zero_node),
2917 NULL_TREE))
2918 break;
2919
2920 in_p = n_in_p, low = n_low, high = n_high;
2921
2922 /* If the high bound is missing, but we
2923 have a low bound, reverse the range so
2924 it goes from zero to the low bound minus 1. */
2925 if (high == 0 && low)
2926 {
2927 in_p = ! in_p;
2928 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
2929 integer_one_node, 0);
2930 low = convert (type, integer_zero_node);
2931 }
2932 }
2933 continue;
2934
2935 case NEGATE_EXPR:
2936 /* (-x) IN [a,b] -> x in [-b, -a] */
2937 n_low = range_binop (MINUS_EXPR, type,
2938 convert (type, integer_zero_node), 0, high, 1);
2939 n_high = range_binop (MINUS_EXPR, type,
2940 convert (type, integer_zero_node), 0, low, 0);
2941 low = n_low, high = n_high;
2942 exp = arg0;
2943 continue;
2944
2945 case BIT_NOT_EXPR:
2946 /* ~ X -> -X - 1 */
2947 exp = build (MINUS_EXPR, type, negate_expr (arg0),
2948 convert (type, integer_one_node));
2949 continue;
2950
2951 case PLUS_EXPR: case MINUS_EXPR:
2952 if (TREE_CODE (arg1) != INTEGER_CST)
2953 break;
2954
2955 /* If EXP is signed, any overflow in the computation is undefined,
2956 so we don't worry about it so long as our computations on
2957 the bounds don't overflow. For unsigned, overflow is defined
2958 and this is exactly the right thing. */
2959 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
2960 type, low, 0, arg1, 0);
2961 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
2962 type, high, 1, arg1, 0);
2963 if ((n_low != 0 && TREE_OVERFLOW (n_low))
2964 || (n_high != 0 && TREE_OVERFLOW (n_high)))
2965 break;
2966
2967 /* Check for an unsigned range which has wrapped around the maximum
2968 value thus making n_high < n_low, and normalize it. */
2969 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
2970 {
2971 low = range_binop (PLUS_EXPR, type, n_high, 0,
2972 integer_one_node, 0);
2973 high = range_binop (MINUS_EXPR, type, n_low, 0,
2974 integer_one_node, 0);
2975
2976 /* If the range is of the form +/- [ x+1, x ], we won't
2977 be able to normalize it. But then, it represents the
2978 whole range or the empty set, so make it
2979 +/- [ -, - ]. */
2980 if (tree_int_cst_equal (n_low, low)
2981 && tree_int_cst_equal (n_high, high))
2982 low = high = 0;
2983 else
2984 in_p = ! in_p;
2985 }
2986 else
2987 low = n_low, high = n_high;
2988
2989 exp = arg0;
2990 continue;
2991
2992 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
2993 if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
2994 break;
2995
2996 if (! INTEGRAL_TYPE_P (type)
2997 || (low != 0 && ! int_fits_type_p (low, type))
2998 || (high != 0 && ! int_fits_type_p (high, type)))
2999 break;
3000
3001 n_low = low, n_high = high;
3002
3003 if (n_low != 0)
3004 n_low = convert (type, n_low);
3005
3006 if (n_high != 0)
3007 n_high = convert (type, n_high);
3008
3009 /* If we're converting from an unsigned to a signed type,
3010 we will be doing the comparison as unsigned. The tests above
3011 have already verified that LOW and HIGH are both positive.
3012
3013 So we have to make sure that the original unsigned value will
3014 be interpreted as positive. */
3015 if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
3016 {
3017 tree equiv_type = (*lang_hooks.types.type_for_mode)
3018 (TYPE_MODE (type), 1);
3019 tree high_positive;
3020
3021 /* A range without an upper bound is, naturally, unbounded.
3022 Since convert would have cropped a very large value, use
3023 the max value for the destination type. */
3024 high_positive
3025 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3026 : TYPE_MAX_VALUE (type);
3027
3028 high_positive = fold (build (RSHIFT_EXPR, type,
3029 convert (type, high_positive),
3030 convert (type, integer_one_node)));
3031
3032 /* If the low bound is specified, "and" the range with the
3033 range for which the original unsigned value will be
3034 positive. */
3035 if (low != 0)
3036 {
3037 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3038 1, n_low, n_high,
3039 1, convert (type, integer_zero_node),
3040 high_positive))
3041 break;
3042
3043 in_p = (n_in_p == in_p);
3044 }
3045 else
3046 {
3047 /* Otherwise, "or" the range with the range of the input
3048 that will be interpreted as negative. */
3049 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3050 0, n_low, n_high,
3051 1, convert (type, integer_zero_node),
3052 high_positive))
3053 break;
3054
3055 in_p = (in_p != n_in_p);
3056 }
3057 }
3058
3059 exp = arg0;
3060 low = n_low, high = n_high;
3061 continue;
3062
3063 default:
3064 break;
3065 }
3066
3067 break;
3068 }
3069
3070 /* If EXP is a constant, we can evaluate whether this is true or false. */
3071 if (TREE_CODE (exp) == INTEGER_CST)
3072 {
3073 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3074 exp, 0, low, 0))
3075 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3076 exp, 1, high, 1)));
3077 low = high = 0;
3078 exp = 0;
3079 }
3080
3081 *pin_p = in_p, *plow = low, *phigh = high;
3082 return exp;
3083 }
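/* A short illustrative trace, X being a hypothetical signed variable:
   for EXP = !(X + 2 > 5), TRUTH_NOT_EXPR flips IN_P, GT_EXPR then
   records the "out" range [-, 5] for X + 2 (cancelled by the flipped
   IN_P), and PLUS_EXPR subtracts 2 from the bound; the result is X
   with IN_P = 1 and range [-, 3], i.e. X <= 3.  */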
3084 \f
3085 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3086 type, TYPE, return an expression to test if EXP is in (or out of, depending
3087 on IN_P) the range. */
3088
3089 static tree
3090 build_range_check (type, exp, in_p, low, high)
3091 tree type;
3092 tree exp;
3093 int in_p;
3094 tree low, high;
3095 {
3096 tree etype = TREE_TYPE (exp);
3097 tree value;
3098
3099 if (! in_p
3100 && (0 != (value = build_range_check (type, exp, 1, low, high))))
3101 return invert_truthvalue (value);
3102
3103 if (low == 0 && high == 0)
3104 return convert (type, integer_one_node);
3105
3106 if (low == 0)
3107 return fold (build (LE_EXPR, type, exp, high));
3108
3109 if (high == 0)
3110 return fold (build (GE_EXPR, type, exp, low));
3111
3112 if (operand_equal_p (low, high, 0))
3113 return fold (build (EQ_EXPR, type, exp, low));
3114
3115 if (integer_zerop (low))
3116 {
3117 if (! TREE_UNSIGNED (etype))
3118 {
3119 etype = (*lang_hooks.types.unsigned_type) (etype);
3120 high = convert (etype, high);
3121 exp = convert (etype, exp);
3122 }
3123 return build_range_check (type, exp, 1, 0, high);
3124 }
3125
3126 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3127 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3128 {
3129 unsigned HOST_WIDE_INT lo;
3130 HOST_WIDE_INT hi;
3131 int prec;
3132
3133 prec = TYPE_PRECISION (etype);
3134 if (prec <= HOST_BITS_PER_WIDE_INT)
3135 {
3136 hi = 0;
3137 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3138 }
3139 else
3140 {
3141 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3142 lo = (unsigned HOST_WIDE_INT) -1;
3143 }
3144
3145 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3146 {
3147 if (TREE_UNSIGNED (etype))
3148 {
3149 etype = (*lang_hooks.types.signed_type) (etype);
3150 exp = convert (etype, exp);
3151 }
3152 return fold (build (GT_EXPR, type, exp,
3153 convert (etype, integer_zero_node)));
3154 }
3155 }
3156
3157 if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3158 && ! TREE_OVERFLOW (value))
3159 return build_range_check (type,
3160 fold (build (MINUS_EXPR, etype, exp, low)),
3161 1, convert (etype, integer_zero_node), value);
3162
3163 return 0;
3164 }
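/* For example, testing a hypothetical integral X against [2, 5] falls
   through to the final case above and becomes

   (unsigned) (X - 2) <= 3

   which is the same form fold_range_test produces for chains like
   X == 2 || X == 3 || X == 4 || X == 5.  */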
3165 \f
3166 /* Given two ranges, see if we can merge them into one. Return 1 if we
3167 can, 0 if we can't. Set the output range into the specified parameters. */
3168
3169 static int
3170 merge_ranges (pin_p, plow, phigh, in0_p, low0, high0, in1_p, low1, high1)
3171 int *pin_p;
3172 tree *plow, *phigh;
3173 int in0_p, in1_p;
3174 tree low0, high0, low1, high1;
3175 {
3176 int no_overlap;
3177 int subset;
3178 int temp;
3179 tree tem;
3180 int in_p;
3181 tree low, high;
3182 int lowequal = ((low0 == 0 && low1 == 0)
3183 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3184 low0, 0, low1, 0)));
3185 int highequal = ((high0 == 0 && high1 == 0)
3186 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3187 high0, 1, high1, 1)));
3188
3189 /* Make range 0 be the range that starts first, or ends last if they
3190 start at the same value. Swap them if it isn't. */
3191 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3192 low0, 0, low1, 0))
3193 || (lowequal
3194 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3195 high1, 1, high0, 1))))
3196 {
3197 temp = in0_p, in0_p = in1_p, in1_p = temp;
3198 tem = low0, low0 = low1, low1 = tem;
3199 tem = high0, high0 = high1, high1 = tem;
3200 }
3201
3202 /* Now flag two cases, whether the ranges are disjoint or whether the
3203 second range is totally subsumed in the first. Note that the tests
3204 below are simplified by the ones above. */
3205 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3206 high0, 1, low1, 0));
3207 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3208 high1, 1, high0, 1));
3209
3210 /* We now have four cases, depending on whether we are including or
3211 excluding the two ranges. */
3212 if (in0_p && in1_p)
3213 {
3214 /* If they don't overlap, the result is false. If the second range
3215 is a subset it is the result. Otherwise, the range is from the start
3216 of the second to the end of the first. */
3217 if (no_overlap)
3218 in_p = 0, low = high = 0;
3219 else if (subset)
3220 in_p = 1, low = low1, high = high1;
3221 else
3222 in_p = 1, low = low1, high = high0;
3223 }
3224
3225 else if (in0_p && ! in1_p)
3226 {
3227 /* If they don't overlap, the result is the first range. If they are
3228 equal, the result is false. If the second range is a subset of the
3229 first, and the ranges begin at the same place, we go from just after
3230 the end of the first range to the end of the second. If the second
3231 range is not a subset of the first, or if it is a subset and both
3232 ranges end at the same place, the range starts at the start of the
3233 first range and ends just before the second range.
3234 Otherwise, we can't describe this as a single range. */
3235 if (no_overlap)
3236 in_p = 1, low = low0, high = high0;
3237 else if (lowequal && highequal)
3238 in_p = 0, low = high = 0;
3239 else if (subset && lowequal)
3240 {
3241 in_p = 1, high = high0;
3242 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3243 integer_one_node, 0);
3244 }
3245 else if (! subset || highequal)
3246 {
3247 in_p = 1, low = low0;
3248 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3249 integer_one_node, 0);
3250 }
3251 else
3252 return 0;
3253 }
3254
3255 else if (! in0_p && in1_p)
3256 {
3257 /* If they don't overlap, the result is the second range. If the second
3258 is a subset of the first, the result is false. Otherwise,
3259 the range starts just after the first range and ends at the
3260 end of the second. */
3261 if (no_overlap)
3262 in_p = 1, low = low1, high = high1;
3263 else if (subset || highequal)
3264 in_p = 0, low = high = 0;
3265 else
3266 {
3267 in_p = 1, high = high1;
3268 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3269 integer_one_node, 0);
3270 }
3271 }
3272
3273 else
3274 {
3275 /* The case where we are excluding both ranges. Here the complex case
3276 is if they don't overlap. In that case, the only time we have a
3277 range is if they are adjacent. If the second is a subset of the
3278 first, the result is the first. Otherwise, the range to exclude
3279 starts at the beginning of the first range and ends at the end of the
3280 second. */
3281 if (no_overlap)
3282 {
3283 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3284 range_binop (PLUS_EXPR, NULL_TREE,
3285 high0, 1,
3286 integer_one_node, 1),
3287 1, low1, 0)))
3288 in_p = 0, low = low0, high = high1;
3289 else
3290 return 0;
3291 }
3292 else if (subset)
3293 in_p = 0, low = low0, high = high0;
3294 else
3295 in_p = 0, low = low0, high = high1;
3296 }
3297
3298 *pin_p = in_p, *plow = low, *phigh = high;
3299 return 1;
3300 }
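/* Some sample merges, with hypothetical integer bounds:

   + [2, 5] and + [4, 9] give + [4, 5] (overlapping intersection);
   + [2, 5] and - [2, 5] give - [-, -], i.e. always false;
   - [2, 5] and - [6, 9], being adjacent, give - [2, 9].  */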
3301 \f
3302 /* EXP is some logical combination of boolean tests. See if we can
3303 merge it into some range test. Return the new tree if so. */
3304
3305 static tree
3306 fold_range_test (exp)
3307 tree exp;
3308 {
3309 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3310 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3311 int in0_p, in1_p, in_p;
3312 tree low0, low1, low, high0, high1, high;
3313 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3314 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3315 tree tem;
3316
3317 /* If this is an OR operation, invert both sides; we will invert
3318 again at the end. */
3319 if (or_op)
3320 in0_p = ! in0_p, in1_p = ! in1_p;
3321
3322 /* If both expressions are the same, if we can merge the ranges, and we
3323 can build the range test, return it or it inverted. If one of the
3324 ranges is always true or always false, consider it to be the same
3325 expression as the other. */
3326 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3327 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3328 in1_p, low1, high1)
3329 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3330 lhs != 0 ? lhs
3331 : rhs != 0 ? rhs : integer_zero_node,
3332 in_p, low, high))))
3333 return or_op ? invert_truthvalue (tem) : tem;
3334
3335 /* On machines where the branch cost is expensive, if this is a
3336 short-circuited branch and the underlying object on both sides
3337 is the same, make a non-short-circuit operation. */
3338 else if (BRANCH_COST >= 2
3339 && lhs != 0 && rhs != 0
3340 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3341 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3342 && operand_equal_p (lhs, rhs, 0))
3343 {
3344 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3345 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
3346 which cases we can't do this. */
3347 if (simple_operand_p (lhs))
3348 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3349 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3350 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3351 TREE_OPERAND (exp, 1));
3352
3353 else if ((*lang_hooks.decls.global_bindings_p) () == 0
3354 && ! contains_placeholder_p (lhs))
3355 {
3356 tree common = save_expr (lhs);
3357
3358 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3359 or_op ? ! in0_p : in0_p,
3360 low0, high0))
3361 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3362 or_op ? ! in1_p : in1_p,
3363 low1, high1))))
3364 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3365 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3366 TREE_TYPE (exp), lhs, rhs);
3367 }
3368 }
3369
3370 return 0;
3371 }
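/* For instance, ch >= '0' && ch <= '9' tests two ranges of the same
   object, so the merged result is roughly
   (unsigned char) (ch - '0') <= 9 (the character-typed operand here
   is illustrative).  */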
3372 \f
3373 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3374 bit value. Arrange things so the extra bits will be set to zero if and
3375 only if C is sign-extended to its full width. If MASK is nonzero,
3376 it is an INTEGER_CST that should be AND'ed with the extra bits. */
3377
3378 static tree
3379 unextend (c, p, unsignedp, mask)
3380 tree c;
3381 int p;
3382 int unsignedp;
3383 tree mask;
3384 {
3385 tree type = TREE_TYPE (c);
3386 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3387 tree temp;
3388
3389 if (p == modesize || unsignedp)
3390 return c;
3391
3392 /* We work by getting just the sign bit into the low-order bit, then
3393 into the high-order bit, then sign-extend. We then XOR that value
3394 with C. */
3395 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3396 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3397
3398 /* We must use a signed type in order to get an arithmetic right shift.
3399 However, we must also avoid introducing accidental overflows, so that
3400 a subsequent call to integer_zerop will work. Hence we must
3401 do the type conversion here. At this point, the constant is either
3402 zero or one, and the conversion to a signed type can never overflow.
3403 We could get an overflow if this conversion is done anywhere else. */
3404 if (TREE_UNSIGNED (type))
3405 temp = convert ((*lang_hooks.types.signed_type) (type), temp);
3406
3407 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3408 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3409 if (mask != 0)
3410 temp = const_binop (BIT_AND_EXPR, temp, convert (TREE_TYPE (c), mask), 0);
3411 /* If necessary, convert the type back to match the type of C. */
3412 if (TREE_UNSIGNED (type))
3413 temp = convert (type, temp);
3414
3415 return convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
3416 }
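/* A bit-level sketch with hypothetical values MODESIZE = 8, P = 4:
   for C = 0xfd (-3 sign-extended from 4 bits) the sign bit of the
   4-bit value is 1, TEMP becomes 0xf0 after the two shifts, and
   C ^ TEMP == 0x0d, so the extra bits are zero exactly because C was
   sign-extended; for C = 0x0d (same low bits, not sign-extended) the
   XOR yields 0xfd, leaving the extra bits set.  */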
3417 \f
3418 /* Find ways of folding logical expressions of LHS and RHS:
3419 Try to merge two comparisons to the same innermost item.
3420 Look for range tests like "ch >= '0' && ch <= '9'".
3421 Look for combinations of simple terms on machines with expensive branches
3422 and evaluate the RHS unconditionally.
3423
3424 For example, if we have p->a == 2 && p->b == 4 and we can make an
3425 object large enough to span both A and B, we can do this with a comparison
3426 against the object ANDed with a mask.
3427
3428 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3429 operations to do this with one comparison.
3430
3431 We check for both normal comparisons and the BIT_AND_EXPRs made by this
3432 function and the one above.
3433
3434 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3435 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3436
3437 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
3438 two operands.
3439
3440 We return the simplified tree or 0 if no optimization is possible. */
3441
3442 static tree
3443 fold_truthop (code, truth_type, lhs, rhs)
3444 enum tree_code code;
3445 tree truth_type, lhs, rhs;
3446 {
3447 /* If this is the "or" of two comparisons, we can do something if
3448 the comparisons are NE_EXPR. If this is the "and", we can do something
3449 if the comparisons are EQ_EXPR. I.e.,
3450 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3451
3452 WANTED_CODE is this operation code. For single bit fields, we can
3453 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3454 comparison for one-bit fields. */
3455
3456 enum tree_code wanted_code;
3457 enum tree_code lcode, rcode;
3458 tree ll_arg, lr_arg, rl_arg, rr_arg;
3459 tree ll_inner, lr_inner, rl_inner, rr_inner;
3460 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3461 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3462 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3463 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3464 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3465 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3466 enum machine_mode lnmode, rnmode;
3467 tree ll_mask, lr_mask, rl_mask, rr_mask;
3468 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3469 tree l_const, r_const;
3470 tree lntype, rntype, result;
3471 int first_bit, end_bit;
3472 int volatilep;
3473
3474 /* Start by getting the comparison codes. Fail if anything is volatile.
3475 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3476 it were surrounded with a NE_EXPR. */
3477
3478 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3479 return 0;
3480
3481 lcode = TREE_CODE (lhs);
3482 rcode = TREE_CODE (rhs);
3483
3484 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3485 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3486
3487 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3488 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3489
3490 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3491 return 0;
3492
3493 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3494 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3495
3496 ll_arg = TREE_OPERAND (lhs, 0);
3497 lr_arg = TREE_OPERAND (lhs, 1);
3498 rl_arg = TREE_OPERAND (rhs, 0);
3499 rr_arg = TREE_OPERAND (rhs, 1);
3500
3501 /* If the RHS can be evaluated unconditionally and its operands are
3502 simple, it wins to evaluate the RHS unconditionally on machines
3503 with expensive branches. In this case, this isn't a comparison
3504 that can be merged. Avoid doing this if the RHS is a floating-point
3505 comparison since those can trap. */
3506
3507 if (BRANCH_COST >= 2
3508 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
3509 && simple_operand_p (rl_arg)
3510 && simple_operand_p (rr_arg))
3511 {
3512 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
3513 if (code == TRUTH_OR_EXPR
3514 && lcode == NE_EXPR && integer_zerop (lr_arg)
3515 && rcode == NE_EXPR && integer_zerop (rr_arg)
3516 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3517 return build (NE_EXPR, truth_type,
3518 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3519 ll_arg, rl_arg),
3520 integer_zero_node);
3521
3522 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
3523 if (code == TRUTH_AND_EXPR
3524 && lcode == EQ_EXPR && integer_zerop (lr_arg)
3525 && rcode == EQ_EXPR && integer_zerop (rr_arg)
3526 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3527 return build (EQ_EXPR, truth_type,
3528 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3529 ll_arg, rl_arg),
3530 integer_zero_node);
3531
3532 return build (code, truth_type, lhs, rhs);
3533 }
3534
3535 /* See if the comparisons can be merged. Then get all the parameters for
3536 each side. */
3537
3538 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
3539 || (rcode != EQ_EXPR && rcode != NE_EXPR))
3540 return 0;
3541
3542 volatilep = 0;
3543 ll_inner = decode_field_reference (ll_arg,
3544 &ll_bitsize, &ll_bitpos, &ll_mode,
3545 &ll_unsignedp, &volatilep, &ll_mask,
3546 &ll_and_mask);
3547 lr_inner = decode_field_reference (lr_arg,
3548 &lr_bitsize, &lr_bitpos, &lr_mode,
3549 &lr_unsignedp, &volatilep, &lr_mask,
3550 &lr_and_mask);
3551 rl_inner = decode_field_reference (rl_arg,
3552 &rl_bitsize, &rl_bitpos, &rl_mode,
3553 &rl_unsignedp, &volatilep, &rl_mask,
3554 &rl_and_mask);
3555 rr_inner = decode_field_reference (rr_arg,
3556 &rr_bitsize, &rr_bitpos, &rr_mode,
3557 &rr_unsignedp, &volatilep, &rr_mask,
3558 &rr_and_mask);
3559
3560 /* The inner operation on the lhs of each comparison must be the
3561 same if we are to be able to do anything.  Then see if we have
3562 constants.  If not, the same must be true for
3563 the rhs's.  */
3564 if (volatilep || ll_inner == 0 || rl_inner == 0
3565 || ! operand_equal_p (ll_inner, rl_inner, 0))
3566 return 0;
3567
3568 if (TREE_CODE (lr_arg) == INTEGER_CST
3569 && TREE_CODE (rr_arg) == INTEGER_CST)
3570 l_const = lr_arg, r_const = rr_arg;
3571 else if (lr_inner == 0 || rr_inner == 0
3572 || ! operand_equal_p (lr_inner, rr_inner, 0))
3573 return 0;
3574 else
3575 l_const = r_const = 0;
3576
3577 /* If either comparison code is not correct for our logical operation,
3578 fail. However, we can convert a one-bit comparison against zero into
3579 the opposite comparison against that bit being set in the field. */
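/* E.g. when merging with TRUTH_AND_EXPR we want EQ_EXPR, so the
   one-bit test (x & 8) != 0 is handled as (x & 8) == 8.  */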
3580
3581 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
3582 if (lcode != wanted_code)
3583 {
3584 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
3585 {
3586 /* Make the left operand unsigned, since we are only interested
3587 in the value of one bit. Otherwise we are doing the wrong
3588 thing below. */
3589 ll_unsignedp = 1;
3590 l_const = ll_mask;
3591 }
3592 else
3593 return 0;
3594 }
3595
3596 /* This is analogous to the code for l_const above. */
3597 if (rcode != wanted_code)
3598 {
3599 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
3600 {
3601 rl_unsignedp = 1;
3602 r_const = rl_mask;
3603 }
3604 else
3605 return 0;
3606 }
3607
3608 /* See if we can find a mode that contains both fields being compared on
3609 the left. If we can't, fail. Otherwise, update all constants and masks
3610 to be relative to a field of that size. */
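/* If, for example, the two fields lie within one 32-bit word,
   get_best_mode may return SImode; the positions and masks below are
   then made relative to that word.  */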
3611 first_bit = MIN (ll_bitpos, rl_bitpos);
3612 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
3613 lnmode = get_best_mode (end_bit - first_bit, first_bit,
3614 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
3615 volatilep);
3616 if (lnmode == VOIDmode)
3617 return 0;
3618
3619 lnbitsize = GET_MODE_BITSIZE (lnmode);
3620 lnbitpos = first_bit & ~ (lnbitsize - 1);
3621 lntype = (*lang_hooks.types.type_for_size) (lnbitsize, 1);
3622 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
3623
3624 if (BYTES_BIG_ENDIAN)
3625 {
3626 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
3627 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
3628 }
3629
3630 ll_mask = const_binop (LSHIFT_EXPR, convert (lntype, ll_mask),
3631 size_int (xll_bitpos), 0);
3632 rl_mask = const_binop (LSHIFT_EXPR, convert (lntype, rl_mask),
3633 size_int (xrl_bitpos), 0);
3634
3635 if (l_const)
3636 {
3637 l_const = convert (lntype, l_const);
3638 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
3639 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
3640 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
3641 fold (build1 (BIT_NOT_EXPR,
3642 lntype, ll_mask)),
3643 0)))
3644 {
3645 warning ("comparison is always %d", wanted_code == NE_EXPR);
3646
3647 return convert (truth_type,
3648 wanted_code == NE_EXPR
3649 ? integer_one_node : integer_zero_node);
3650 }
3651 }
3652 if (r_const)
3653 {
3654 r_const = convert (lntype, r_const);
3655 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
3656 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
3657 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
3658 fold (build1 (BIT_NOT_EXPR,
3659 lntype, rl_mask)),
3660 0)))
3661 {
3662 warning ("comparison is always %d", wanted_code == NE_EXPR);
3663
3664 return convert (truth_type,
3665 wanted_code == NE_EXPR
3666 ? integer_one_node : integer_zero_node);
3667 }
3668 }
3669
3670 /* If the right sides are not constant, do the same for them.  Also,
3671 disallow this optimization if a size or signedness mismatch occurs
3672 between the left and right sides.  */
3673 if (l_const == 0)
3674 {
3675 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
3676 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
3677 /* Make sure the two fields on the right
3678 correspond to the left without being swapped. */
3679 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
3680 return 0;
3681
3682 first_bit = MIN (lr_bitpos, rr_bitpos);
3683 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
3684 rnmode = get_best_mode (end_bit - first_bit, first_bit,
3685 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
3686 volatilep);
3687 if (rnmode == VOIDmode)
3688 return 0;
3689
3690 rnbitsize = GET_MODE_BITSIZE (rnmode);
3691 rnbitpos = first_bit & ~ (rnbitsize - 1);
3692 rntype = (*lang_hooks.types.type_for_size) (rnbitsize, 1);
3693 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
3694
3695 if (BYTES_BIG_ENDIAN)
3696 {
3697 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
3698 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
3699 }
3700
3701 lr_mask = const_binop (LSHIFT_EXPR, convert (rntype, lr_mask),
3702 size_int (xlr_bitpos), 0);
3703 rr_mask = const_binop (LSHIFT_EXPR, convert (rntype, rr_mask),
3704 size_int (xrr_bitpos), 0);
3705
3706 /* Make a mask that corresponds to both fields being compared.
3707 Do this for both items being compared. If the operands are the
3708 same size and the bits being compared are in the same position
3709 then we can do this by masking both and comparing the masked
3710 results. */
3711 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3712 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
3713 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
3714 {
3715 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
3716 ll_unsignedp || rl_unsignedp);
3717 if (! all_ones_mask_p (ll_mask, lnbitsize))
3718 lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
3719
3720 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
3721 lr_unsignedp || rr_unsignedp);
3722 if (! all_ones_mask_p (lr_mask, rnbitsize))
3723 rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
3724
3725 return build (wanted_code, truth_type, lhs, rhs);
3726 }
3727
3728 /* There is still another way we can do something: If both pairs of
3729 fields being compared are adjacent, we may be able to make a wider
3730 field containing them both.
3731
3732 Note that we still must mask the lhs/rhs expressions. Furthermore,
3733 the mask must be shifted to account for the shift done by
3734 make_bit_field_ref. */
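/* For instance, two adjacent 8-bit fields at bit positions 0 and 8
   can be fetched as a single 16-bit field at position 0 and compared
   in one operation.  */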
3735 if ((ll_bitsize + ll_bitpos == rl_bitpos
3736 && lr_bitsize + lr_bitpos == rr_bitpos)
3737 || (ll_bitpos == rl_bitpos + rl_bitsize
3738 && lr_bitpos == rr_bitpos + rr_bitsize))
3739 {
3740 tree type;
3741
3742 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
3743 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
3744 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
3745 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
3746
3747 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
3748 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
3749 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
3750 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
3751
3752 /* Convert to the smaller type before masking out unwanted bits. */
3753 type = lntype;
3754 if (lntype != rntype)
3755 {
3756 if (lnbitsize > rnbitsize)
3757 {
3758 lhs = convert (rntype, lhs);
3759 ll_mask = convert (rntype, ll_mask);
3760 type = rntype;
3761 }
3762 else if (lnbitsize < rnbitsize)
3763 {
3764 rhs = convert (lntype, rhs);
3765 lr_mask = convert (lntype, lr_mask);
3766 type = lntype;
3767 }
3768 }
3769
3770 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
3771 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
3772
3773 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
3774 rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
3775
3776 return build (wanted_code, truth_type, lhs, rhs);
3777 }
3778
3779 return 0;
3780 }
3781
3782 /* Handle the case of comparisons with constants.  If there is something in
3783 common between the masks, those bits of the constants must be the same.
3784 If not, the `and' condition is always false and the `or' condition is
3785 always true; test for this to avoid generating incorrect code below.  */
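/* E.g. `(x & 3) == 1 && (x & 5) == 4' is always 0: the masks share
   bit 0, but the constants disagree there.  */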
3786 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
3787 if (! integer_zerop (result)
3788 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
3789 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
3790 {
3791 if (wanted_code == NE_EXPR)
3792 {
3793 warning ("`or' of unmatched not-equal tests is always 1");
3794 return convert (truth_type, integer_one_node);
3795 }
3796 else
3797 {
3798 warning ("`and' of mutually exclusive equal-tests is always 0");
3799 return convert (truth_type, integer_zero_node);
3800 }
3801 }
3802
3803 /* Construct the expression we will return. First get the component
3804 reference we will make. Unless the mask is all ones the width of
3805 that field, perform the mask operation. Then compare with the
3806 merged constant. */
3807 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
3808 ll_unsignedp || rl_unsignedp);
3809
3810 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3811 if (! all_ones_mask_p (ll_mask, lnbitsize))
3812 result = build (BIT_AND_EXPR, lntype, result, ll_mask);
3813
3814 return build (wanted_code, truth_type, result,
3815 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
3816 }
3817 \f
3818 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
3819 constant. */
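/* E.g. MAX (i, 4) > 4 -> i > 4, while MIN (i, 4) > 4 -> 0.  */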
3820
3821 static tree
3822 optimize_minmax_comparison (t)
3823 tree t;
3824 {
3825 tree type = TREE_TYPE (t);
3826 tree arg0 = TREE_OPERAND (t, 0);
3827 enum tree_code op_code;
3828 tree comp_const = TREE_OPERAND (t, 1);
3829 tree minmax_const;
3830 int consts_equal, consts_lt;
3831 tree inner;
3832
3833 STRIP_SIGN_NOPS (arg0);
3834
3835 op_code = TREE_CODE (arg0);
3836 minmax_const = TREE_OPERAND (arg0, 1);
3837 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
3838 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
3839 inner = TREE_OPERAND (arg0, 0);
3840
3841 /* If something does not permit us to optimize, return the original tree. */
3842 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
3843 || TREE_CODE (comp_const) != INTEGER_CST
3844 || TREE_CONSTANT_OVERFLOW (comp_const)
3845 || TREE_CODE (minmax_const) != INTEGER_CST
3846 || TREE_CONSTANT_OVERFLOW (minmax_const))
3847 return t;
3848
3849 /* Now handle all the various comparison codes. We only handle EQ_EXPR
3850 and GT_EXPR, doing the rest with recursive calls using logical
3851 simplifications. */
3852 switch (TREE_CODE (t))
3853 {
3854 case NE_EXPR: case LT_EXPR: case LE_EXPR:
3855 return
3856 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
3857
3858 case GE_EXPR:
3859 return
3860 fold (build (TRUTH_ORIF_EXPR, type,
3861 optimize_minmax_comparison
3862 (build (EQ_EXPR, type, arg0, comp_const)),
3863 optimize_minmax_comparison
3864 (build (GT_EXPR, type, arg0, comp_const))));
3865
3866 case EQ_EXPR:
3867 if (op_code == MAX_EXPR && consts_equal)
3868 /* MAX (X, 0) == 0 -> X <= 0 */
3869 return fold (build (LE_EXPR, type, inner, comp_const));
3870
3871 else if (op_code == MAX_EXPR && consts_lt)
3872 /* MAX (X, 0) == 5 -> X == 5 */
3873 return fold (build (EQ_EXPR, type, inner, comp_const));
3874
3875 else if (op_code == MAX_EXPR)
3876 /* MAX (X, 0) == -1 -> false */
3877 return omit_one_operand (type, integer_zero_node, inner);
3878
3879 else if (consts_equal)
3880 /* MIN (X, 0) == 0 -> X >= 0 */
3881 return fold (build (GE_EXPR, type, inner, comp_const));
3882
3883 else if (consts_lt)
3884 /* MIN (X, 0) == 5 -> false */
3885 return omit_one_operand (type, integer_zero_node, inner);
3886
3887 else
3888 /* MIN (X, 0) == -1 -> X == -1 */
3889 return fold (build (EQ_EXPR, type, inner, comp_const));
3890
3891 case GT_EXPR:
3892 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
3893 /* MAX (X, 0) > 0 -> X > 0
3894 MAX (X, 0) > 5 -> X > 5 */
3895 return fold (build (GT_EXPR, type, inner, comp_const));
3896
3897 else if (op_code == MAX_EXPR)
3898 /* MAX (X, 0) > -1 -> true */
3899 return omit_one_operand (type, integer_one_node, inner);
3900
3901 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
3902 /* MIN (X, 0) > 0 -> false
3903 MIN (X, 0) > 5 -> false */
3904 return omit_one_operand (type, integer_zero_node, inner);
3905
3906 else
3907 /* MIN (X, 0) > -1 -> X > -1 */
3908 return fold (build (GT_EXPR, type, inner, comp_const));
3909
3910 default:
3911 return t;
3912 }
3913 }
3914 \f
3915 /* T is an integer expression that is being multiplied, divided, or
3916 reduced modulo a constant C (CODE says which operation and what kind
3917 of division or modulus).  See if we can eliminate that operation by folding it with
3918 other operations already in T. WIDE_TYPE, if non-null, is a type that
3919 should be used for the computation if wider than our type.
3920
3921 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
3922 (X * 2) + (Y * 4). We must, however, be assured that either the original
3923 expression would not overflow or that overflow is undefined for the type
3924 in the language in question.
3925
3926 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
3927 the machine has a multiply-accumulate insn or that this is part of an
3928 addressing calculation.
3929
3930 If we return a non-null expression, it is an equivalent form of the
3931 original computation, but need not be in the original type. */
3932
3933 static tree
3934 extract_muldiv (t, c, code, wide_type)
3935 tree t;
3936 tree c;
3937 enum tree_code code;
3938 tree wide_type;
3939 {
3940 tree type = TREE_TYPE (t);
3941 enum tree_code tcode = TREE_CODE (t);
3942 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
3943 > GET_MODE_SIZE (TYPE_MODE (type)))
3944 ? wide_type : type);
3945 tree t1, t2;
3946 int same_p = tcode == code;
3947 tree op0 = NULL_TREE, op1 = NULL_TREE;
3948
3949 /* Don't deal with constants of zero here; they confuse the code below. */
3950 if (integer_zerop (c))
3951 return NULL_TREE;
3952
3953 if (TREE_CODE_CLASS (tcode) == '1')
3954 op0 = TREE_OPERAND (t, 0);
3955
3956 if (TREE_CODE_CLASS (tcode) == '2')
3957 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
3958
3959 /* Note that we need not handle conditional operations here since fold
3960 already handles those cases. So just do arithmetic here. */
3961 switch (tcode)
3962 {
3963 case INTEGER_CST:
3964 /* For a constant, we can always simplify if we are a multiply
3965 or (for divide and modulus) if it is a multiple of our constant. */
3966 if (code == MULT_EXPR
3967 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
3968 return const_binop (code, convert (ctype, t), convert (ctype, c), 0);
3969 break;
3970
3971 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
3972 /* If op0 is an expression ... */
3973 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
3974 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
3975 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
3976 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
3977 /* ... and is unsigned, and its type is smaller than ctype,
3978 then we cannot pass through as widening. */
3979 && ((TREE_UNSIGNED (TREE_TYPE (op0))
3980 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
3981 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
3982 && (GET_MODE_SIZE (TYPE_MODE (ctype))
3983 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
3984 /* ... or its type is larger than ctype,
3985 then we cannot pass through this truncation. */
3986 || (GET_MODE_SIZE (TYPE_MODE (ctype))
3987 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))))
3988 break;
3989
3990 /* Pass the constant down and see if we can make a simplification. If
3991 we can, replace this expression with the inner simplification for
3992 possible later conversion to our or some other type. */
3993 if (0 != (t1 = extract_muldiv (op0, convert (TREE_TYPE (op0), c), code,
3994 code == MULT_EXPR ? ctype : NULL_TREE)))
3995 return t1;
3996 break;
3997
3998 case NEGATE_EXPR: case ABS_EXPR:
3999 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4000 return fold (build1 (tcode, ctype, convert (ctype, t1)));
4001 break;
4002
4003 case MIN_EXPR: case MAX_EXPR:
4004 /* If widening the type changes the signedness, then we can't perform
4005 this optimization as that changes the result. */
4006 if (TREE_UNSIGNED (ctype) != TREE_UNSIGNED (type))
4007 break;
4008
4009 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
4010 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
4011 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
4012 {
4013 if (tree_int_cst_sgn (c) < 0)
4014 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
4015
4016 return fold (build (tcode, ctype, convert (ctype, t1),
4017 convert (ctype, t2)));
4018 }
4019 break;
4020
4021 case WITH_RECORD_EXPR:
4022 if ((t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code, wide_type)) != 0)
4023 return build (WITH_RECORD_EXPR, TREE_TYPE (t1), t1,
4024 TREE_OPERAND (t, 1));
4025 break;
4026
4027 case SAVE_EXPR:
4028 /* If this has not been evaluated and the operand has no side effects,
4029 we can see if we can do something inside it and make a new one.
4030 Note that this test is overly conservative since we can do this
4031 if the only reason it had side effects is that it was another
4032 similar SAVE_EXPR, but that isn't worth bothering with. */
4033 if (SAVE_EXPR_RTL (t) == 0 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0))
4034 && 0 != (t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code,
4035 wide_type)))
4036 {
4037 t1 = save_expr (t1);
4038 if (SAVE_EXPR_PERSISTENT_P (t) && TREE_CODE (t1) == SAVE_EXPR)
4039 SAVE_EXPR_PERSISTENT_P (t1) = 1;
4040 if (is_pending_size (t))
4041 put_pending_size (t1);
4042 return t1;
4043 }
4044 break;
4045
4046 case LSHIFT_EXPR: case RSHIFT_EXPR:
4047 /* If the second operand is constant, this is a multiplication
4048 or floor division by a power of two, so we can treat it that
4049 way unless the multiplier or divisor overflows. */
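/* E.g. X << 3 is handled as X * 8, and X >> 2 as the floor
   division of X by 4.  */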
4050 if (TREE_CODE (op1) == INTEGER_CST
4051 /* const_binop may not detect overflow correctly,
4052 so check for it explicitly here. */
4053 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
4054 && TREE_INT_CST_HIGH (op1) == 0
4055 && 0 != (t1 = convert (ctype,
4056 const_binop (LSHIFT_EXPR, size_one_node,
4057 op1, 0)))
4058 && ! TREE_OVERFLOW (t1))
4059 return extract_muldiv (build (tcode == LSHIFT_EXPR
4060 ? MULT_EXPR : FLOOR_DIV_EXPR,
4061 ctype, convert (ctype, op0), t1),
4062 c, code, wide_type);
4063 break;
4064
4065 case PLUS_EXPR: case MINUS_EXPR:
4066 /* See if we can eliminate the operation on both sides. If we can, we
4067 can return a new PLUS or MINUS. If we can't, the only remaining
4068 cases where we can do anything are if the second operand is a
4069 constant. */
4070 t1 = extract_muldiv (op0, c, code, wide_type);
4071 t2 = extract_muldiv (op1, c, code, wide_type);
4072 if (t1 != 0 && t2 != 0
4073 && (code == MULT_EXPR
4074 /* If not multiplication, we can only do this if either operand
4075 is divisible by c. */
4076 || multiple_of_p (ctype, op0, c)
4077 || multiple_of_p (ctype, op1, c)))
4078 return fold (build (tcode, ctype, convert (ctype, t1),
4079 convert (ctype, t2)));
4080
4081 /* If this was a subtraction, negate OP1 and set it to be an addition.
4082 This simplifies the logic below. */
4083 if (tcode == MINUS_EXPR)
4084 tcode = PLUS_EXPR, op1 = negate_expr (op1);
4085
4086 if (TREE_CODE (op1) != INTEGER_CST)
4087 break;
4088
4089 /* If either OP1 or C is negative, this optimization is not safe for
4090 some of the division and remainder types, while for others we need
4091 to change the code.  */
4092 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
4093 {
4094 if (code == CEIL_DIV_EXPR)
4095 code = FLOOR_DIV_EXPR;
4096 else if (code == FLOOR_DIV_EXPR)
4097 code = CEIL_DIV_EXPR;
4098 else if (code != MULT_EXPR
4099 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
4100 break;
4101 }
4102
4103 /* If it's a multiply or a division/modulus operation of a multiple
4104 of our constant, do the operation and verify it doesn't overflow. */
4105 if (code == MULT_EXPR
4106 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4107 {
4108 op1 = const_binop (code, convert (ctype, op1), convert (ctype, c), 0);
4109 if (op1 == 0 || TREE_OVERFLOW (op1))
4110 break;
4111 }
4112 else
4113 break;
4114
4115 /* If we have an unsigned type that is not a sizetype, we cannot widen
4116 the operation since it will change the result if the original
4117 computation overflowed. */
4118 if (TREE_UNSIGNED (ctype)
4119 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
4120 && ctype != type)
4121 break;
4122
4123 /* If we were able to eliminate our operation from the first side,
4124 apply our operation to the second side and reform the PLUS. */
4125 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4126 return fold (build (tcode, ctype, convert (ctype, t1), op1));
4127
4128 /* The last case is if we are a multiply. In that case, we can
4129 apply the distributive law to commute the multiply and addition
4130 if the multiplication of the constants doesn't overflow. */
4131 if (code == MULT_EXPR)
4132 return fold (build (tcode, ctype, fold (build (code, ctype,
4133 convert (ctype, op0),
4134 convert (ctype, c))),
4135 op1));
4136
4137 break;
4138
4139 case MULT_EXPR:
4140 /* We have a special case here if we are doing something like
4141 (C * 8) % 4 since we know that's zero. */
4142 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4143 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4144 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4145 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4146 return omit_one_operand (type, integer_zero_node, op0);
4147
4148 /* ... fall through ... */
4149
4150 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
4151 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
4152 /* If we can extract our operation from the LHS, do so and return a
4153 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
4154 do something only if the second operand is a constant. */
4155 if (same_p
4156 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4157 return fold (build (tcode, ctype, convert (ctype, t1),
4158 convert (ctype, op1)));
4159 else if (tcode == MULT_EXPR && code == MULT_EXPR
4160 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4161 return fold (build (tcode, ctype, convert (ctype, op0),
4162 convert (ctype, t1)));
4163 else if (TREE_CODE (op1) != INTEGER_CST)
4164 return 0;
4165
4166 /* If these are the same operation types, we can associate them
4167 assuming no overflow. */
4168 if (tcode == code
4169 && 0 != (t1 = const_binop (MULT_EXPR, convert (ctype, op1),
4170 convert (ctype, c), 0))
4171 && ! TREE_OVERFLOW (t1))
4172 return fold (build (tcode, ctype, convert (ctype, op0), t1));
4173
4174 /* If these operations "cancel" each other, we have the main
4175 optimizations of this pass, which occur when either constant is a
4176 multiple of the other, in which case we replace this with either an
4177 operation of CODE or TCODE.
4178
4179 If we have an unsigned type that is not a sizetype, we cannot do
4180 this since it will change the result if the original computation
4181 overflowed. */
4182 if ((! TREE_UNSIGNED (ctype)
4183 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
4184 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4185 || (tcode == MULT_EXPR
4186 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4187 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
4188 {
4189 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4190 return fold (build (tcode, ctype, convert (ctype, op0),
4191 convert (ctype,
4192 const_binop (TRUNC_DIV_EXPR,
4193 op1, c, 0))));
4194 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4195 return fold (build (code, ctype, convert (ctype, op0),
4196 convert (ctype,
4197 const_binop (TRUNC_DIV_EXPR,
4198 c, op1, 0))));
4199 }
4200 break;
4201
4202 default:
4203 break;
4204 }
4205
4206 return 0;
4207 }
4208 \f
4209 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4210 S, a SAVE_EXPR, return the expression actually being evaluated. Note
4211 that we may sometimes modify the tree. */
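/* E.g. ((void) S, X) -> X, since S is already evaluated by the
   enclosing COMPOUND_EXPR.  */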
4212
4213 static tree
4214 strip_compound_expr (t, s)
4215 tree t;
4216 tree s;
4217 {
4218 enum tree_code code = TREE_CODE (t);
4219
4220 /* See if this is the COMPOUND_EXPR we want to eliminate. */
4221 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4222 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4223 return TREE_OPERAND (t, 1);
4224
4225 /* See if this is a COND_EXPR or a simple arithmetic operator. We
4226 don't bother handling any other types. */
4227 else if (code == COND_EXPR)
4228 {
4229 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4230 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4231 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
4232 }
4233 else if (TREE_CODE_CLASS (code) == '1')
4234 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4235 else if (TREE_CODE_CLASS (code) == '<'
4236 || TREE_CODE_CLASS (code) == '2')
4237 {
4238 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4239 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4240 }
4241
4242 return t;
4243 }
4244 \f
4245 /* Return a node which has the indicated constant VALUE (either 0 or
4246 1), and is of the indicated TYPE. */
4247
4248 static tree
4249 constant_boolean_node (value, type)
4250 int value;
4251 tree type;
4252 {
4253 if (type == integer_type_node)
4254 return value ? integer_one_node : integer_zero_node;
4255 else if (TREE_CODE (type) == BOOLEAN_TYPE)
4256 return (*lang_hooks.truthvalue_conversion) (value ? integer_one_node :
4257 integer_zero_node);
4258 else
4259 {
4260 tree t = build_int_2 (value, 0);
4261
4262 TREE_TYPE (t) = type;
4263 return t;
4264 }
4265 }
4266
4267 /* Utility function for the following routine, to see how complex a nesting of
4268 COND_EXPRs can be.  EXPR is the expression and LIM is a count beyond which
4269 we don't care (to avoid spending too much time on complex expressions).  */
4270
4271 static int
4272 count_cond (expr, lim)
4273 tree expr;
4274 int lim;
4275 {
4276 int ctrue, cfalse;
4277
4278 if (TREE_CODE (expr) != COND_EXPR)
4279 return 0;
4280 else if (lim <= 0)
4281 return 0;
4282
4283 ctrue = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4284 cfalse = count_cond (TREE_OPERAND (expr, 2), lim - 1 - ctrue);
4285 return MIN (lim, 1 + ctrue + cfalse);
4286 }
4287
4288 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
4289 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
4290 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
4291 expression, and ARG to `a'. If COND_FIRST_P is non-zero, then the
4292 COND is the first argument to CODE; otherwise (as in the example
4293 given here), it is the second argument. TYPE is the type of the
4294 original expression. */
4295
4296 static tree
4297 fold_binary_op_with_conditional_arg (code, type, cond, arg, cond_first_p)
4298 enum tree_code code;
4299 tree type;
4300 tree cond;
4301 tree arg;
4302 int cond_first_p;
4303 {
4304 tree test, true_value, false_value;
4305 tree lhs = NULL_TREE;
4306 tree rhs = NULL_TREE;
4307 /* In the end, we'll produce a COND_EXPR. Both arms of the
4308 conditional expression will be binary operations. The left-hand
4309 side of the expression to be executed if the condition is true
4310 will be pointed to by TRUE_LHS. Similarly, the right-hand side
4311 of the expression to be executed if the condition is true will be
4312 pointed to by TRUE_RHS. FALSE_LHS and FALSE_RHS are analogous --
4313 but apply to the expression to be executed if the conditional is
4314 false. */
4315 tree *true_lhs;
4316 tree *true_rhs;
4317 tree *false_lhs;
4318 tree *false_rhs;
4319 /* These are the codes to use for the left-hand side and right-hand
4320 side of the COND_EXPR. Normally, they are the same as CODE. */
4321 enum tree_code lhs_code = code;
4322 enum tree_code rhs_code = code;
4323 /* And these are the types of the expressions. */
4324 tree lhs_type = type;
4325 tree rhs_type = type;
4326
4327 if (cond_first_p)
4328 {
4329 true_rhs = false_rhs = &arg;
4330 true_lhs = &true_value;
4331 false_lhs = &false_value;
4332 }
4333 else
4334 {
4335 true_lhs = false_lhs = &arg;
4336 true_rhs = &true_value;
4337 false_rhs = &false_value;
4338 }
4339
4340 if (TREE_CODE (cond) == COND_EXPR)
4341 {
4342 test = TREE_OPERAND (cond, 0);
4343 true_value = TREE_OPERAND (cond, 1);
4344 false_value = TREE_OPERAND (cond, 2);
4345 /* If this operand is a throw expression (and thus has void type),
4346 it does not make sense to try to perform a logical or arithmetic
4347 operation involving it.  Instead of building `a + throw 3', for
4348 example, we simply build `a, throw 3'.  */
4349 if (VOID_TYPE_P (TREE_TYPE (true_value)))
4350 {
4351 lhs_code = COMPOUND_EXPR;
4352 if (!cond_first_p)
4353 lhs_type = void_type_node;
4354 }
4355 if (VOID_TYPE_P (TREE_TYPE (false_value)))
4356 {
4357 rhs_code = COMPOUND_EXPR;
4358 if (!cond_first_p)
4359 rhs_type = void_type_node;
4360 }
4361 }
4362 else
4363 {
4364 tree testtype = TREE_TYPE (cond);
4365 test = cond;
4366 true_value = convert (testtype, integer_one_node);
4367 false_value = convert (testtype, integer_zero_node);
4368 }
4369
4370 /* If ARG is complex we want to make sure we only evaluate
4371 it once. Though this is only required if it is volatile, it
4372 might be more efficient even if it is not. However, if we
4373 succeed in folding one part to a constant, we do not need
4374 to make this SAVE_EXPR. Since we do this optimization
4375 primarily to see if we do end up with a constant and this
4376 SAVE_EXPR interferes with later optimizations, suppressing
4377 it when we can is important.
4378
4379 If we are not in a function, we can't make a SAVE_EXPR, so don't
4380 try to do so. Don't try to see if the result is a constant
4381 if an arm is a COND_EXPR since we get exponential behavior
4382 in that case. */
4383
4384 if (TREE_CODE (arg) != SAVE_EXPR && ! TREE_CONSTANT (arg)
4385 && (*lang_hooks.decls.global_bindings_p) () == 0
4386 && ((TREE_CODE (arg) != VAR_DECL
4387 && TREE_CODE (arg) != PARM_DECL)
4388 || TREE_SIDE_EFFECTS (arg)))
4389 {
4390 if (TREE_CODE (true_value) != COND_EXPR)
4391 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4392
4393 if (TREE_CODE (false_value) != COND_EXPR)
4394 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4395
4396 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4397 && (rhs == 0 || !TREE_CONSTANT (rhs)))
4398 arg = save_expr (arg), lhs = rhs = 0;
4399 }
4400
4401 if (lhs == 0)
4402 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4403 if (rhs == 0)
4404 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4405
4406 test = fold (build (COND_EXPR, type, test, lhs, rhs));
4407
4408 if (TREE_CODE (arg) == SAVE_EXPR)
4409 return build (COMPOUND_EXPR, type,
4410 convert (void_type_node, arg),
4411 strip_compound_expr (test, arg));
4412 else
4413 return convert (type, test);
4414 }
4415
4416 \f
4417 /* Subroutine of fold() that checks for the addition of +/- 0.0.
4418
4419 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
4420 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
4421 ADDEND is the same as X.
4422
4423 X + 0 and X - 0 both give X when X is NaN, infinite, or non-zero
4424 and finite. The problematic cases are when X is zero, and its mode
4425 has signed zeros. In the case of rounding towards -infinity,
4426 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
4427 modes, X + 0 is not the same as X because -0 + 0 is +0.  */
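/* For example, if X is -0.0, then X + 0.0 yields +0.0 rather than X
   under the default rounding mode, while X - 0.0 yields -0.0, which
   is X; so only the subtraction case can be folded when signed zeros
   are honored.  */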
4428
4429 static bool
4430 fold_real_zero_addition_p (type, addend, negate)
4431 tree type, addend;
4432 int negate;
4433 {
4434 if (!real_zerop (addend))
4435 return false;
4436
4437 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
4438 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
4439 return true;
4440
4441 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
4442 if (TREE_CODE (addend) == REAL_CST
4443 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
4444 negate = !negate;
4445
4446 /* The mode has signed zeros, and we have to honor their sign.
4447 In this situation, there is only one case we can return true for.
4448 X - 0 is the same as X unless rounding towards -infinity is
4449 supported. */
4450 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
4451 }
4452
4453
4454 /* Perform constant folding and related simplification of EXPR.
4455 The related simplifications include x*1 => x, x*0 => 0, etc.,
4456 and application of the associative law.
4457 NOP_EXPR conversions may be removed freely (as long as we
4458 are careful not to change the C type of the overall expression)
4459 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
4460 but we can constant-fold them if they have constant operands. */
4461
4462 tree
4463 fold (expr)
4464 tree expr;
4465 {
4466 tree t = expr;
4467 tree t1 = NULL_TREE;
4468 tree tem;
4469 tree type = TREE_TYPE (expr);
4470 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4471 enum tree_code code = TREE_CODE (t);
4472 int kind = TREE_CODE_CLASS (code);
4473 int invert;
4474 /* WINS will be nonzero when the switch is done
4475 if all operands are constant. */
4476 int wins = 1;
4477
4478 /* Don't try to process an RTL_EXPR since its operands aren't trees.
4479 Likewise for a SAVE_EXPR that's already been evaluated. */
4480 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
4481 return t;
4482
4483 /* Return right away if a constant. */
4484 if (kind == 'c')
4485 return t;
4486
4487 #ifdef MAX_INTEGER_COMPUTATION_MODE
4488 check_max_integer_computation_mode (expr);
4489 #endif
4490
4491 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
4492 {
4493 tree subop;
4494
4495 /* Special case for conversion ops that can have fixed point args. */
4496 arg0 = TREE_OPERAND (t, 0);
4497
4498 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
4499 if (arg0 != 0)
4500 STRIP_SIGN_NOPS (arg0);
4501
4502 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
4503 subop = TREE_REALPART (arg0);
4504 else
4505 subop = arg0;
4506
4507 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
4508 && TREE_CODE (subop) != REAL_CST
4509 )
4510 /* Note that TREE_CONSTANT isn't enough:
4511 static var addresses are constant but we can't
4512 do arithmetic on them. */
4513 wins = 0;
4514 }
4515 else if (IS_EXPR_CODE_CLASS (kind) || kind == 'r')
4516 {
4517 int len = first_rtl_op (code);
4518 int i;
4519 for (i = 0; i < len; i++)
4520 {
4521 tree op = TREE_OPERAND (t, i);
4522 tree subop;
4523
4524 if (op == 0)
4525 continue; /* Valid for CALL_EXPR, at least. */
4526
4527 if (kind == '<' || code == RSHIFT_EXPR)
4528 {
4529 /* Signedness matters here. Perhaps we can refine this
4530 later. */
4531 STRIP_SIGN_NOPS (op);
4532 }
4533 else
4534 /* Strip any conversions that don't change the mode. */
4535 STRIP_NOPS (op);
4536
4537 if (TREE_CODE (op) == COMPLEX_CST)
4538 subop = TREE_REALPART (op);
4539 else
4540 subop = op;
4541
4542 if (TREE_CODE (subop) != INTEGER_CST
4543 && TREE_CODE (subop) != REAL_CST)
4544 /* Note that TREE_CONSTANT isn't enough:
4545 static var addresses are constant but we can't
4546 do arithmetic on them. */
4547 wins = 0;
4548
4549 if (i == 0)
4550 arg0 = op;
4551 else if (i == 1)
4552 arg1 = op;
4553 }
4554 }
4555
4556 /* If this is a commutative operation, and ARG0 is a constant, move it
4557 to ARG1 to reduce the number of tests below. */
4558 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
4559 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
4560 || code == BIT_AND_EXPR)
4561 && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
4562 {
4563 tem = arg0; arg0 = arg1; arg1 = tem;
4564
4565 tem = TREE_OPERAND (t, 0); TREE_OPERAND (t, 0) = TREE_OPERAND (t, 1);
4566 TREE_OPERAND (t, 1) = tem;
4567 }
4568
4569 /* Now WINS is set as described above,
4570 ARG0 is the first operand of EXPR,
4571 and ARG1 is the second operand (if it has more than one operand).
4572
4573 First check for cases where an arithmetic operation is applied to a
4574 compound, conditional, or comparison operation. Push the arithmetic
4575 operation inside the compound or conditional to see if any folding
4576 can then be done. Convert comparison to conditional for this purpose.
4577 This also optimizes non-constant cases that used to be done in
4578 expand_expr.
4579
4580 Before we do that, see if this is a BIT_AND_EXPR, a BIT_IOR_EXPR, an
4581 EQ_EXPR, or an NE_EXPR where one operand is a truth value and the other
4582 is a truth value or a BIT_AND_EXPR with the constant 1.  In that case, the
4583 code below would make the expression more complex. Change it to a
4584 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
4585 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
4586
4587 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
4588 || code == EQ_EXPR || code == NE_EXPR)
4589 && ((truth_value_p (TREE_CODE (arg0))
4590 && (truth_value_p (TREE_CODE (arg1))
4591 || (TREE_CODE (arg1) == BIT_AND_EXPR
4592 && integer_onep (TREE_OPERAND (arg1, 1)))))
4593 || (truth_value_p (TREE_CODE (arg1))
4594 && (truth_value_p (TREE_CODE (arg0))
4595 || (TREE_CODE (arg0) == BIT_AND_EXPR
4596 && integer_onep (TREE_OPERAND (arg0, 1)))))))
4597 {
4598 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
4599 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
4600 : TRUTH_XOR_EXPR,
4601 type, arg0, arg1));
4602
4603 if (code == EQ_EXPR)
4604 t = invert_truthvalue (t);
4605
4606 return t;
4607 }
4608
4609 if (TREE_CODE_CLASS (code) == '1')
4610 {
4611 if (TREE_CODE (arg0) == COMPOUND_EXPR)
4612 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4613 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
4614 else if (TREE_CODE (arg0) == COND_EXPR)
4615 {
4616 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
4617 fold (build1 (code, type, TREE_OPERAND (arg0, 1))),
4618 fold (build1 (code, type, TREE_OPERAND (arg0, 2)))));
4619
4620 /* If this was a conversion, and all we did was to move it
4621 inside the COND_EXPR, bring it back out.  But leave it if
4622 it is a conversion from integer to integer and the
4623 result precision is no wider than a word since such a
4624 conversion is cheap and may be optimized away by combine,
4625 while it couldn't if it were outside the COND_EXPR. Then return
4626 so we don't get into an infinite recursion loop taking the
4627 conversion out and then back in. */
4628
4629 if ((code == NOP_EXPR || code == CONVERT_EXPR
4630 || code == NON_LVALUE_EXPR)
4631 && TREE_CODE (t) == COND_EXPR
4632 && TREE_CODE (TREE_OPERAND (t, 1)) == code
4633 && TREE_CODE (TREE_OPERAND (t, 2)) == code
4634 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
4635 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
4636 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
4637 && (INTEGRAL_TYPE_P
4638 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))))
4639 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
4640 t = build1 (code, type,
4641 build (COND_EXPR,
4642 TREE_TYPE (TREE_OPERAND
4643 (TREE_OPERAND (t, 1), 0)),
4644 TREE_OPERAND (t, 0),
4645 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
4646 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
4647 return t;
4648 }
4649 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
4650 return fold (build (COND_EXPR, type, arg0,
4651 fold (build1 (code, type, integer_one_node)),
4652 fold (build1 (code, type, integer_zero_node))));
4653 }
4654 else if (TREE_CODE_CLASS (code) == '2'
4655 || TREE_CODE_CLASS (code) == '<')
4656 {
4657 if (TREE_CODE (arg1) == COMPOUND_EXPR)
4658 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
4659 fold (build (code, type,
4660 arg0, TREE_OPERAND (arg1, 1))));
4661 else if ((TREE_CODE (arg1) == COND_EXPR
4662 || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
4663 && TREE_CODE_CLASS (code) != '<'))
4664 && (TREE_CODE (arg0) != COND_EXPR
4665 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
4666 && (! TREE_SIDE_EFFECTS (arg0)
4667 || ((*lang_hooks.decls.global_bindings_p) () == 0
4668 && ! contains_placeholder_p (arg0))))
4669 return
4670 fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
4671 /*cond_first_p=*/0);
4672 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
4673 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4674 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
4675 else if ((TREE_CODE (arg0) == COND_EXPR
4676 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
4677 && TREE_CODE_CLASS (code) != '<'))
4678 && (TREE_CODE (arg1) != COND_EXPR
4679 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
4680 && (! TREE_SIDE_EFFECTS (arg1)
4681 || ((*lang_hooks.decls.global_bindings_p) () == 0
4682 && ! contains_placeholder_p (arg1))))
4683 return
4684 fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
4685 /*cond_first_p=*/1);
4686 }
4687 else if (TREE_CODE_CLASS (code) == '<'
4688 && TREE_CODE (arg0) == COMPOUND_EXPR)
4689 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4690 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
4691 else if (TREE_CODE_CLASS (code) == '<'
4692 && TREE_CODE (arg1) == COMPOUND_EXPR)
4693 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
4694 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
4695
4696 switch (code)
4697 {
4698 case INTEGER_CST:
4699 case REAL_CST:
4700 case VECTOR_CST:
4701 case STRING_CST:
4702 case COMPLEX_CST:
4703 case CONSTRUCTOR:
4704 return t;
4705
4706 case CONST_DECL:
4707 return fold (DECL_INITIAL (t));
4708
4709 case NOP_EXPR:
4710 case FLOAT_EXPR:
4711 case CONVERT_EXPR:
4712 case FIX_TRUNC_EXPR:
4713 /* Other kinds of FIX are not handled properly by fold_convert. */
4714
4715 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
4716 return TREE_OPERAND (t, 0);
4717
4718 /* Handle cases of two conversions in a row. */
4719 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
4720 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
4721 {
4722 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4723 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
4724 tree final_type = TREE_TYPE (t);
4725 int inside_int = INTEGRAL_TYPE_P (inside_type);
4726 int inside_ptr = POINTER_TYPE_P (inside_type);
4727 int inside_float = FLOAT_TYPE_P (inside_type);
4728 unsigned int inside_prec = TYPE_PRECISION (inside_type);
4729 int inside_unsignedp = TREE_UNSIGNED (inside_type);
4730 int inter_int = INTEGRAL_TYPE_P (inter_type);
4731 int inter_ptr = POINTER_TYPE_P (inter_type);
4732 int inter_float = FLOAT_TYPE_P (inter_type);
4733 unsigned int inter_prec = TYPE_PRECISION (inter_type);
4734 int inter_unsignedp = TREE_UNSIGNED (inter_type);
4735 int final_int = INTEGRAL_TYPE_P (final_type);
4736 int final_ptr = POINTER_TYPE_P (final_type);
4737 int final_float = FLOAT_TYPE_P (final_type);
4738 unsigned int final_prec = TYPE_PRECISION (final_type);
4739 int final_unsignedp = TREE_UNSIGNED (final_type);
4740
4741 /* In addition to the cases of two conversions in a row
4742 handled below, if we are converting something to its own
4743 type via an object of identical or wider precision, neither
4744 conversion is needed. */
4745 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (final_type)
4746 && ((inter_int && final_int) || (inter_float && final_float))
4747 && inter_prec >= final_prec)
4748 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4749
4750 /* Likewise, if the intermediate and final types are either both
4751 float or both integer, we don't need the middle conversion if
4752 it is wider than the final type and doesn't change the signedness
4753 (for integers). Avoid this if the final type is a pointer
4754 since then we sometimes need the inner conversion. Likewise if
4755 the outer has a precision not equal to the size of its mode. */
4756 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
4757 || (inter_float && inside_float))
4758 && inter_prec >= inside_prec
4759 && (inter_float || inter_unsignedp == inside_unsignedp)
4760 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
4761 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
4762 && ! final_ptr)
4763 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4764
4765 /* If we have a sign-extension of a zero-extended value, we can
4766 replace that by a single zero-extension. */
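/* E.g. on a typical target, (long long) (int) (unsigned char) c only
   needs the zero-extension from unsigned char to long long.  */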
4767 if (inside_int && inter_int && final_int
4768 && inside_prec < inter_prec && inter_prec < final_prec
4769 && inside_unsignedp && !inter_unsignedp)
4770 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4771
4772 /* Two conversions in a row are not needed unless:
4773 - some conversion is floating-point (overstrict for now), or
4774 - the intermediate type is narrower than both initial and
4775 final, or
4776 - the intermediate type and innermost type differ in signedness,
4777 and the outermost type is wider than the intermediate, or
4778 - the initial type is a pointer type and the precisions of the
4779 intermediate and final types differ, or
4780 - the final type is a pointer type and the precisions of the
4781 initial and intermediate types differ. */
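/* For instance, on a typical target (int) (long) c, where c is a
   signed char, can drop the intermediate widening to long, while
   (int) (char) i cannot drop the narrowing to char.  */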
4782 if (! inside_float && ! inter_float && ! final_float
4783 && (inter_prec > inside_prec || inter_prec > final_prec)
4784 && ! (inside_int && inter_int
4785 && inter_unsignedp != inside_unsignedp
4786 && inter_prec < final_prec)
4787 && ((inter_unsignedp && inter_prec > inside_prec)
4788 == (final_unsignedp && final_prec > inter_prec))
4789 && ! (inside_ptr && inter_prec != final_prec)
4790 && ! (final_ptr && inside_prec != inter_prec)
4791 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
4792 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
4793 && ! final_ptr)
4794 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4795 }
4796
4797 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
4798 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
4799 /* Detect assigning a bitfield. */
4800 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
4801 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
4802 {
4803 /* Don't leave an assignment inside a conversion
4804 unless assigning a bitfield. */
4805 tree prev = TREE_OPERAND (t, 0);
4806 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
4807 /* First do the assignment, then return converted constant. */
4808 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
4809 TREE_USED (t) = 1;
4810 return t;
4811 }
4812
4813 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
4814 constant (if x has signed type, the sign bit cannot be set
4815 in c).  This folds the extension into the BIT_AND_EXPR.  */
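/* For instance, (unsigned int) (c & 0x7f), where the BIT_AND is
   computed in an unsigned char type, becomes
   (unsigned int) c & 0x7f.  */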
4816 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
4817 && TREE_CODE (TREE_TYPE (t)) != BOOLEAN_TYPE
4818 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
4819 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
4820 {
4821 tree and = TREE_OPERAND (t, 0);
4822 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
4823 int change = 0;
4824
4825 if (TREE_UNSIGNED (TREE_TYPE (and))
4826 || (TYPE_PRECISION (TREE_TYPE (t))
4827 <= TYPE_PRECISION (TREE_TYPE (and))))
4828 change = 1;
4829 else if (TYPE_PRECISION (TREE_TYPE (and1))
4830 <= HOST_BITS_PER_WIDE_INT
4831 && host_integerp (and1, 1))
4832 {
4833 unsigned HOST_WIDE_INT cst;
4834
4835 cst = tree_low_cst (and1, 1);
4836 cst &= (HOST_WIDE_INT) -1
4837 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
4838 change = (cst == 0);
4839 #ifdef LOAD_EXTEND_OP
4840 if (change
4841 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
4842 == ZERO_EXTEND))
4843 {
4844 tree uns = (*lang_hooks.types.unsigned_type) (TREE_TYPE (and0));
4845 and0 = convert (uns, and0);
4846 and1 = convert (uns, and1);
4847 }
4848 #endif
4849 }
4850 if (change)
4851 return fold (build (BIT_AND_EXPR, TREE_TYPE (t),
4852 convert (TREE_TYPE (t), and0),
4853 convert (TREE_TYPE (t), and1)));
4854 }
4855
4856 if (!wins)
4857 {
4858 TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
4859 return t;
4860 }
4861 return fold_convert (t, arg0);
4862
4863 case VIEW_CONVERT_EXPR:
4864 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
4865 return build1 (VIEW_CONVERT_EXPR, type,
4866 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4867 return t;
4868
4869 case COMPONENT_REF:
4870 if (TREE_CODE (arg0) == CONSTRUCTOR)
4871 {
4872 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
4873 if (m)
4874 t = TREE_VALUE (m);
4875 }
4876 return t;
4877
4878 case RANGE_EXPR:
4879 TREE_CONSTANT (t) = wins;
4880 return t;
4881
4882 case NEGATE_EXPR:
4883 if (wins)
4884 {
4885 if (TREE_CODE (arg0) == INTEGER_CST)
4886 {
4887 unsigned HOST_WIDE_INT low;
4888 HOST_WIDE_INT high;
4889 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
4890 TREE_INT_CST_HIGH (arg0),
4891 &low, &high);
4892 t = build_int_2 (low, high);
4893 TREE_TYPE (t) = type;
4894 TREE_OVERFLOW (t)
4895 = (TREE_OVERFLOW (arg0)
4896 | force_fit_type (t, overflow && !TREE_UNSIGNED (type)));
4897 TREE_CONSTANT_OVERFLOW (t)
4898 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
4899 }
4900 else if (TREE_CODE (arg0) == REAL_CST)
4901 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
4902 }
4903 else if (TREE_CODE (arg0) == NEGATE_EXPR)
4904 return TREE_OPERAND (arg0, 0);
4905
4906 /* Convert - (a - b) to (b - a) for non-floating-point. */
4907 else if (TREE_CODE (arg0) == MINUS_EXPR
4908 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
4909 return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
4910 TREE_OPERAND (arg0, 0));
4911
4912 return t;
4913
4914 case ABS_EXPR:
4915 if (wins)
4916 {
4917 if (TREE_CODE (arg0) == INTEGER_CST)
4918 {
4919 /* If the value is unsigned, then the absolute value is
4920 the same as the ordinary value. */
4921 if (TREE_UNSIGNED (type))
4922 return arg0;
4923 /* Similarly, if the value is non-negative. */
4924 else if (INT_CST_LT (integer_minus_one_node, arg0))
4925 return arg0;
4926 /* If the value is negative, then the absolute value is
4927 its negation. */
4928 else
4929 {
4930 unsigned HOST_WIDE_INT low;
4931 HOST_WIDE_INT high;
4932 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
4933 TREE_INT_CST_HIGH (arg0),
4934 &low, &high);
4935 t = build_int_2 (low, high);
4936 TREE_TYPE (t) = type;
4937 TREE_OVERFLOW (t)
4938 = (TREE_OVERFLOW (arg0)
4939 | force_fit_type (t, overflow));
4940 TREE_CONSTANT_OVERFLOW (t)
4941 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
4942 }
4943 }
4944 else if (TREE_CODE (arg0) == REAL_CST)
4945 {
4946 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
4947 t = build_real (type,
4948 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
4949 }
4950 }
4951 else if (TREE_CODE (arg0) == ABS_EXPR || TREE_CODE (arg0) == NEGATE_EXPR)
4952 return build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
4953 return t;
4954
4955 case CONJ_EXPR:
4956 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
4957 return convert (type, arg0);
4958 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
4959 return build (COMPLEX_EXPR, type,
4960 TREE_OPERAND (arg0, 0),
4961 negate_expr (TREE_OPERAND (arg0, 1)));
4962 else if (TREE_CODE (arg0) == COMPLEX_CST)
4963 return build_complex (type, TREE_REALPART (arg0),
4964 negate_expr (TREE_IMAGPART (arg0)));
4965 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
4966 return fold (build (TREE_CODE (arg0), type,
4967 fold (build1 (CONJ_EXPR, type,
4968 TREE_OPERAND (arg0, 0))),
4969 fold (build1 (CONJ_EXPR,
4970 type, TREE_OPERAND (arg0, 1)))));
4971 else if (TREE_CODE (arg0) == CONJ_EXPR)
4972 return TREE_OPERAND (arg0, 0);
4973 return t;
4974
4975 case BIT_NOT_EXPR:
4976 if (wins)
4977 {
4978 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
4979 ~ TREE_INT_CST_HIGH (arg0));
4980 TREE_TYPE (t) = type;
4981 force_fit_type (t, 0);
4982 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
4983 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
4984 }
4985 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
4986 return TREE_OPERAND (arg0, 0);
4987 return t;
4988
4989 case PLUS_EXPR:
4990 /* A + (-B) -> A - B */
4991 if (TREE_CODE (arg1) == NEGATE_EXPR)
4992 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
4993 /* (-A) + B -> B - A */
4994 if (TREE_CODE (arg0) == NEGATE_EXPR)
4995 return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
4996 else if (! FLOAT_TYPE_P (type))
4997 {
4998 if (integer_zerop (arg1))
4999 return non_lvalue (convert (type, arg0));
5000
5001 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
5002 with a constant, and the two constants have no bits in common,
5003 we should treat this as a BIT_IOR_EXPR since this may produce more
5004 simplifications. */
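/* E.g. (x & 0xf0) + (y & 0x0f) -> (x & 0xf0) | (y & 0x0f), since no
   bit position can produce a carry.  */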
5005 if (TREE_CODE (arg0) == BIT_AND_EXPR
5006 && TREE_CODE (arg1) == BIT_AND_EXPR
5007 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5008 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5009 && integer_zerop (const_binop (BIT_AND_EXPR,
5010 TREE_OPERAND (arg0, 1),
5011 TREE_OPERAND (arg1, 1), 0)))
5012 {
5013 code = BIT_IOR_EXPR;
5014 goto bit_ior;
5015 }
5016
5017 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
5018 (plus (plus (mult) (mult)) (foo)) so that we can
5019 take advantage of the factoring cases below. */
5020 if ((TREE_CODE (arg0) == PLUS_EXPR
5021 && TREE_CODE (arg1) == MULT_EXPR)
5022 || (TREE_CODE (arg1) == PLUS_EXPR
5023 && TREE_CODE (arg0) == MULT_EXPR))
5024 {
5025 tree parg0, parg1, parg, marg;
5026
5027 if (TREE_CODE (arg0) == PLUS_EXPR)
5028 parg = arg0, marg = arg1;
5029 else
5030 parg = arg1, marg = arg0;
5031 parg0 = TREE_OPERAND (parg, 0);
5032 parg1 = TREE_OPERAND (parg, 1);
5033 STRIP_NOPS (parg0);
5034 STRIP_NOPS (parg1);
5035
5036 if (TREE_CODE (parg0) == MULT_EXPR
5037 && TREE_CODE (parg1) != MULT_EXPR)
5038 return fold (build (PLUS_EXPR, type,
5039 fold (build (PLUS_EXPR, type, parg0, marg)),
5040 parg1));
5041 if (TREE_CODE (parg0) != MULT_EXPR
5042 && TREE_CODE (parg1) == MULT_EXPR)
5043 return fold (build (PLUS_EXPR, type,
5044 fold (build (PLUS_EXPR, type, parg1, marg)),
5045 parg0));
5046 }
5047
5048 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
5049 {
5050 tree arg00, arg01, arg10, arg11;
5051 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
5052
5053 /* (A * C) + (B * C) -> (A+B) * C.
5054 We are most concerned about the case where C is a constant,
5055 but other combinations show up during loop reduction. Since
5056 it is not difficult, try all four possibilities. */
5057
5058 arg00 = TREE_OPERAND (arg0, 0);
5059 arg01 = TREE_OPERAND (arg0, 1);
5060 arg10 = TREE_OPERAND (arg1, 0);
5061 arg11 = TREE_OPERAND (arg1, 1);
5062 same = NULL_TREE;
5063
5064 if (operand_equal_p (arg01, arg11, 0))
5065 same = arg01, alt0 = arg00, alt1 = arg10;
5066 else if (operand_equal_p (arg00, arg10, 0))
5067 same = arg00, alt0 = arg01, alt1 = arg11;
5068 else if (operand_equal_p (arg00, arg11, 0))
5069 same = arg00, alt0 = arg01, alt1 = arg10;
5070 else if (operand_equal_p (arg01, arg10, 0))
5071 same = arg01, alt0 = arg00, alt1 = arg11;
5072
5073 /* No identical multiplicands; see if we can find a common
5074 power-of-two factor in non-power-of-two multiplies. This
5075 can help in multi-dimensional array access. */
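/* E.g. i*12 + j*4 -> (i*3 + j) * 4, exposing the common factor in a
   multi-dimensional array reference.  */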
5076 else if (TREE_CODE (arg01) == INTEGER_CST
5077 && TREE_CODE (arg11) == INTEGER_CST
5078 && TREE_INT_CST_HIGH (arg01) == 0
5079 && TREE_INT_CST_HIGH (arg11) == 0)
5080 {
5081 HOST_WIDE_INT int01, int11, tmp;
5082 int01 = TREE_INT_CST_LOW (arg01);
5083 int11 = TREE_INT_CST_LOW (arg11);
5084
5085 /* Move min of absolute values to int11. */
5086 if ((int01 >= 0 ? int01 : -int01)
5087 < (int11 >= 0 ? int11 : -int11))
5088 {
5089 tmp = int01, int01 = int11, int11 = tmp;
5090 alt0 = arg00, arg00 = arg10, arg10 = alt0;
5091 alt0 = arg01, arg01 = arg11, arg11 = alt0;
5092 }
5093
5094 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
5095 {
5096 alt0 = fold (build (MULT_EXPR, type, arg00,
5097 build_int_2 (int01 / int11, 0)));
5098 alt1 = arg10;
5099 same = arg11;
5100 }
5101 }
5102
5103 if (same)
5104 return fold (build (MULT_EXPR, type,
5105 fold (build (PLUS_EXPR, type, alt0, alt1)),
5106 same));
5107 }
5108 }
5109
5110 /* See if ARG1 is zero and X + ARG1 reduces to X. */
5111 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
5112 return non_lvalue (convert (type, arg0));
5113
5114 /* Likewise if the operands are reversed. */
5115 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
5116 return non_lvalue (convert (type, arg1));
5117
5118 bit_rotate:
      /* (A << C1) + (A >> C2), where A is unsigned and C1+C2 is the
	 size of A, is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)), where A is unsigned and Z is the
	 size of A, is a rotate of A by B bits.  */
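      /* For example, with a 32-bit unsigned int x, both
	 (x << 3) + (x >> 29) and (x << n) + (x >> (32 - n)) are
	 recognized here and become rotates of x.  */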
5123 {
5124 enum tree_code code0, code1;
5125 code0 = TREE_CODE (arg0);
5126 code1 = TREE_CODE (arg1);
5127 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
5128 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
5129 && operand_equal_p (TREE_OPERAND (arg0, 0),
5130 TREE_OPERAND (arg1, 0), 0)
5131 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5132 {
5133 tree tree01, tree11;
5134 enum tree_code code01, code11;
5135
5136 tree01 = TREE_OPERAND (arg0, 1);
5137 tree11 = TREE_OPERAND (arg1, 1);
5138 STRIP_NOPS (tree01);
5139 STRIP_NOPS (tree11);
5140 code01 = TREE_CODE (tree01);
5141 code11 = TREE_CODE (tree11);
5142 if (code01 == INTEGER_CST
5143 && code11 == INTEGER_CST
5144 && TREE_INT_CST_HIGH (tree01) == 0
5145 && TREE_INT_CST_HIGH (tree11) == 0
5146 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
5147 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
5148 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
5149 code0 == LSHIFT_EXPR ? tree01 : tree11);
5150 else if (code11 == MINUS_EXPR)
5151 {
5152 tree tree110, tree111;
5153 tree110 = TREE_OPERAND (tree11, 0);
5154 tree111 = TREE_OPERAND (tree11, 1);
5155 STRIP_NOPS (tree110);
5156 STRIP_NOPS (tree111);
5157 if (TREE_CODE (tree110) == INTEGER_CST
5158 && 0 == compare_tree_int (tree110,
5159 TYPE_PRECISION
5160 (TREE_TYPE (TREE_OPERAND
5161 (arg0, 0))))
5162 && operand_equal_p (tree01, tree111, 0))
5163 return build ((code0 == LSHIFT_EXPR
5164 ? LROTATE_EXPR
5165 : RROTATE_EXPR),
5166 type, TREE_OPERAND (arg0, 0), tree01);
5167 }
5168 else if (code01 == MINUS_EXPR)
5169 {
5170 tree tree010, tree011;
5171 tree010 = TREE_OPERAND (tree01, 0);
5172 tree011 = TREE_OPERAND (tree01, 1);
5173 STRIP_NOPS (tree010);
5174 STRIP_NOPS (tree011);
5175 if (TREE_CODE (tree010) == INTEGER_CST
5176 && 0 == compare_tree_int (tree010,
5177 TYPE_PRECISION
5178 (TREE_TYPE (TREE_OPERAND
5179 (arg0, 0))))
5180 && operand_equal_p (tree11, tree011, 0))
5181 return build ((code0 != LSHIFT_EXPR
5182 ? LROTATE_EXPR
5183 : RROTATE_EXPR),
5184 type, TREE_OPERAND (arg0, 0), tree11);
5185 }
5186 }
5187 }
5188
5189 associate:
      /* In most languages, we can't associate operations on floats
	 through parentheses.  Rather than remember where the
	 parentheses were, we don't associate floats at all.  It
	 shouldn't matter much.  However, associating multiplications
	 is only very slightly inaccurate, so do that if
	 -funsafe-math-optimizations is specified.  */
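      /* For example, reassociating (x + 1e20) - 1e20 would yield x,
	 but the original expression evaluates to 0.0 whenever |x| is
	 small enough that x + 1e20 rounds to 1e20.  */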
5195
5196 if (! wins
5197 && (! FLOAT_TYPE_P (type)
5198 || (flag_unsafe_math_optimizations && code == MULT_EXPR)))
5199 {
5200 tree var0, con0, lit0, minus_lit0;
5201 tree var1, con1, lit1, minus_lit1;
5202
5203 /* Split both trees into variables, constants, and literals. Then
5204 associate each group together, the constants with literals,
5205 then the result with variables. This increases the chances of
5206 literals being recombined later and of generating relocatable
5207 expressions for the sum of a constant and literal. */
5208 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
5209 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
5210 code == MINUS_EXPR);
5211
5212 /* Only do something if we found more than two objects. Otherwise,
5213 nothing has changed and we risk infinite recursion. */
5214 if (2 < ((var0 != 0) + (var1 != 0)
5215 + (con0 != 0) + (con1 != 0)
5216 + (lit0 != 0) + (lit1 != 0)
5217 + (minus_lit0 != 0) + (minus_lit1 != 0)))
5218 {
5219 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
5220 if (code == MINUS_EXPR)
5221 code = PLUS_EXPR;
5222
5223 var0 = associate_trees (var0, var1, code, type);
5224 con0 = associate_trees (con0, con1, code, type);
5225 lit0 = associate_trees (lit0, lit1, code, type);
5226 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
5227
5228 /* Preserve the MINUS_EXPR if the negative part of the literal is
5229 greater than the positive part. Otherwise, the multiplicative
	     folding code (i.e. extract_muldiv) may be fooled in case
	     unsigned constants are subtracted, like in the following
5232 example: ((X*2 + 4) - 8U)/2. */
5233 if (minus_lit0 && lit0)
5234 {
5235 if (tree_int_cst_lt (lit0, minus_lit0))
5236 {
5237 minus_lit0 = associate_trees (minus_lit0, lit0,
5238 MINUS_EXPR, type);
5239 lit0 = 0;
5240 }
5241 else
5242 {
5243 lit0 = associate_trees (lit0, minus_lit0,
5244 MINUS_EXPR, type);
5245 minus_lit0 = 0;
5246 }
5247 }
5248 if (minus_lit0)
5249 {
5250 if (con0 == 0)
5251 return convert (type, associate_trees (var0, minus_lit0,
5252 MINUS_EXPR, type));
5253 else
5254 {
5255 con0 = associate_trees (con0, minus_lit0,
5256 MINUS_EXPR, type);
5257 return convert (type, associate_trees (var0, con0,
5258 PLUS_EXPR, type));
5259 }
5260 }
5261
5262 con0 = associate_trees (con0, lit0, code, type);
5263 return convert (type, associate_trees (var0, con0, code, type));
5264 }
5265 }
5266
5267 binary:
5268 if (wins)
5269 t1 = const_binop (code, arg0, arg1, 0);
5270 if (t1 != NULL_TREE)
5271 {
5272 /* The return value should always have
5273 the same type as the original expression. */
5274 if (TREE_TYPE (t1) != TREE_TYPE (t))
5275 t1 = convert (TREE_TYPE (t), t1);
5276
5277 return t1;
5278 }
5279 return t;
5280
5281 case MINUS_EXPR:
5282 /* A - (-B) -> A + B */
5283 if (TREE_CODE (arg1) == NEGATE_EXPR)
5284 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
      /* (-A) - CST -> (-CST) - A for floating point (what about ints?)  */
5286 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == REAL_CST)
5287 return
5288 fold (build (MINUS_EXPR, type,
5289 build_real (TREE_TYPE (arg1),
5290 REAL_VALUE_NEGATE (TREE_REAL_CST (arg1))),
5291 TREE_OPERAND (arg0, 0)));
5292
5293 if (! FLOAT_TYPE_P (type))
5294 {
5295 if (! wins && integer_zerop (arg0))
5296 return negate_expr (convert (type, arg1));
5297 if (integer_zerop (arg1))
5298 return non_lvalue (convert (type, arg0));
5299
5300 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
5301 about the case where C is a constant, just try one of the
5302 four possibilities. */
5303
5304 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
5305 && operand_equal_p (TREE_OPERAND (arg0, 1),
5306 TREE_OPERAND (arg1, 1), 0))
5307 return fold (build (MULT_EXPR, type,
5308 fold (build (MINUS_EXPR, type,
5309 TREE_OPERAND (arg0, 0),
5310 TREE_OPERAND (arg1, 0))),
5311 TREE_OPERAND (arg0, 1)));
5312 }
5313
5314 /* See if ARG1 is zero and X - ARG1 reduces to X. */
5315 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
5316 return non_lvalue (convert (type, arg0));
5317
5318 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
5319 ARG0 is zero and X + ARG0 reduces to X, since that would mean
5320 (-ARG1 + ARG0) reduces to -ARG1. */
5321 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
5322 return negate_expr (convert (type, arg1));
5323
5324 /* Fold &x - &x. This can happen from &x.foo - &x.
5325 This is unsafe for certain floats even in non-IEEE formats.
5326 In IEEE, it is unsafe because it does wrong for NaNs.
5327 Also note that operand_equal_p is always false if an operand
5328 is volatile. */
5329
5330 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
5331 && operand_equal_p (arg0, arg1, 0))
5332 return convert (type, integer_zero_node);
5333
5334 goto associate;
5335
5336 case MULT_EXPR:
5337 /* (-A) * (-B) -> A * B */
5338 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
5339 return fold (build (MULT_EXPR, type, TREE_OPERAND (arg0, 0),
5340 TREE_OPERAND (arg1, 0)));
5341
5342 if (! FLOAT_TYPE_P (type))
5343 {
5344 if (integer_zerop (arg1))
5345 return omit_one_operand (type, arg1, arg0);
5346 if (integer_onep (arg1))
5347 return non_lvalue (convert (type, arg0));
5348
5349 /* (a * (1 << b)) is (a << b) */
5350 if (TREE_CODE (arg1) == LSHIFT_EXPR
5351 && integer_onep (TREE_OPERAND (arg1, 0)))
5352 return fold (build (LSHIFT_EXPR, type, arg0,
5353 TREE_OPERAND (arg1, 1)));
5354 if (TREE_CODE (arg0) == LSHIFT_EXPR
5355 && integer_onep (TREE_OPERAND (arg0, 0)))
5356 return fold (build (LSHIFT_EXPR, type, arg1,
5357 TREE_OPERAND (arg0, 1)));
5358
5359 if (TREE_CODE (arg1) == INTEGER_CST
5360 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
5361 code, NULL_TREE)))
5362 return convert (type, tem);
5363
5364 }
5365 else
5366 {
5367 /* Maybe fold x * 0 to 0. The expressions aren't the same
5368 when x is NaN, since x * 0 is also NaN. Nor are they the
5369 same in modes with signed zeros, since multiplying a
5370 negative value by 0 gives -0, not +0. */
5371 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
5372 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
5373 && real_zerop (arg1))
5374 return omit_one_operand (type, arg1, arg0);
5375 /* In IEEE floating point, x*1 is not equivalent to x for snans.
5376 However, ANSI says we can drop signals,
5377 so we can do this anyway. */
5378 if (real_onep (arg1))
5379 return non_lvalue (convert (type, arg0));
5380
5381 /* Transform x * -1.0 into -x. This should be safe for NaNs,
5382 signed zeros and signed infinities, but is currently
5383 restricted to "unsafe math optimizations" just in case. */
5384 if (flag_unsafe_math_optimizations
5385 && real_minus_onep (arg1))
5386 return fold (build1 (NEGATE_EXPR, type, arg0));
5387
5388 /* x*2 is x+x */
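	  /* The SAVE_EXPR makes x evaluate only once even though it
	     appears twice in the x + x replacement; that is also why
	     this is only done inside a function, where a SAVE_EXPR is
	     usable (global_bindings_p is zero).  */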
5389 if (! wins && real_twop (arg1)
5390 && (*lang_hooks.decls.global_bindings_p) () == 0
5391 && ! contains_placeholder_p (arg0))
5392 {
5393 tree arg = save_expr (arg0);
5394 return build (PLUS_EXPR, type, arg, arg);
5395 }
5396 }
5397 goto associate;
5398
5399 case BIT_IOR_EXPR:
5400 bit_ior:
5401 if (integer_all_onesp (arg1))
5402 return omit_one_operand (type, arg1, arg0);
5403 if (integer_zerop (arg1))
5404 return non_lvalue (convert (type, arg0));
5405 t1 = distribute_bit_expr (code, type, arg0, arg1);
5406 if (t1 != NULL_TREE)
5407 return t1;
5408
5409 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
5410
5411 This results in more efficient code for machines without a NAND
5412 instruction. Combine will canonicalize to the first form
5413 which will allow use of NAND instructions provided by the
5414 backend if they exist. */
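      /* This is just De Morgan's law: ~a | ~b is ~(a & b).  */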
5415 if (TREE_CODE (arg0) == BIT_NOT_EXPR
5416 && TREE_CODE (arg1) == BIT_NOT_EXPR)
5417 {
5418 return fold (build1 (BIT_NOT_EXPR, type,
5419 build (BIT_AND_EXPR, type,
5420 TREE_OPERAND (arg0, 0),
5421 TREE_OPERAND (arg1, 0))));
5422 }
5423
5424 /* See if this can be simplified into a rotate first. If that
5425 is unsuccessful continue in the association code. */
5426 goto bit_rotate;
5427
5428 case BIT_XOR_EXPR:
5429 if (integer_zerop (arg1))
5430 return non_lvalue (convert (type, arg0));
5431 if (integer_all_onesp (arg1))
5432 return fold (build1 (BIT_NOT_EXPR, type, arg0));
5433
5434 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
5435 with a constant, and the two constants have no bits in common,
5436 we should treat this as a BIT_IOR_EXPR since this may produce more
5437 simplifications. */
5438 if (TREE_CODE (arg0) == BIT_AND_EXPR
5439 && TREE_CODE (arg1) == BIT_AND_EXPR
5440 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5441 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5442 && integer_zerop (const_binop (BIT_AND_EXPR,
5443 TREE_OPERAND (arg0, 1),
5444 TREE_OPERAND (arg1, 1), 0)))
5445 {
5446 code = BIT_IOR_EXPR;
5447 goto bit_ior;
5448 }
5449
5450 /* See if this can be simplified into a rotate first. If that
5451 is unsuccessful continue in the association code. */
5452 goto bit_rotate;
5453
5454 case BIT_AND_EXPR:
5455 bit_and:
5456 if (integer_all_onesp (arg1))
5457 return non_lvalue (convert (type, arg0));
5458 if (integer_zerop (arg1))
5459 return omit_one_operand (type, arg1, arg0);
5460 t1 = distribute_bit_expr (code, type, arg0, arg1);
5461 if (t1 != NULL_TREE)
5462 return t1;
      /* Simplify ((int)c & 0x3ff) into (int)c, if c is unsigned char
	 (i.e. if the mask has all bits of the narrower type set).  */
5464 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
5465 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5466 {
5467 unsigned int prec
5468 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
5469
5470 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
5471 && (~TREE_INT_CST_LOW (arg1)
5472 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
5473 return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
5474 }
5475
5476 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
5477
5478 This results in more efficient code for machines without a NOR
5479 instruction. Combine will canonicalize to the first form
5480 which will allow use of NOR instructions provided by the
5481 backend if they exist. */
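      /* This is the other De Morgan law: ~a & ~b is ~(a | b).  */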
5482 if (TREE_CODE (arg0) == BIT_NOT_EXPR
5483 && TREE_CODE (arg1) == BIT_NOT_EXPR)
5484 {
5485 return fold (build1 (BIT_NOT_EXPR, type,
5486 build (BIT_IOR_EXPR, type,
5487 TREE_OPERAND (arg0, 0),
5488 TREE_OPERAND (arg1, 0))));
5489 }
5490
5491 goto associate;
5492
5493 case BIT_ANDTC_EXPR:
5494 if (integer_all_onesp (arg0))
5495 return non_lvalue (convert (type, arg1));
5496 if (integer_zerop (arg0))
5497 return omit_one_operand (type, arg0, arg1);
5498 if (TREE_CODE (arg1) == INTEGER_CST)
5499 {
5500 arg1 = fold (build1 (BIT_NOT_EXPR, type, arg1));
5501 code = BIT_AND_EXPR;
5502 goto bit_and;
5503 }
5504 goto binary;
5505
5506 case RDIV_EXPR:
5507 /* Don't touch a floating-point divide by zero unless the mode
5508 of the constant can represent infinity. */
5509 if (TREE_CODE (arg1) == REAL_CST
5510 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
5511 && real_zerop (arg1))
5512 return t;
5513
5514 /* (-A) / (-B) -> A / B */
5515 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
5516 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
5517 TREE_OPERAND (arg1, 0)));
5518
5519 /* In IEEE floating point, x/1 is not equivalent to x for snans.
5520 However, ANSI says we can drop signals, so we can do this anyway. */
5521 if (real_onep (arg1))
5522 return non_lvalue (convert (type, arg0));
5523
5524 /* If ARG1 is a constant, we can convert this to a multiply by the
5525 reciprocal. This does not have the same rounding properties,
5526 so only do this if -funsafe-math-optimizations. We can actually
5527 always safely do it if ARG1 is a power of two, but it's hard to
5528 tell if it is or not in a portable manner. */
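      /* For example, x / 5.0 becomes x * 0.2 only under
	 -funsafe-math-optimizations, since 0.2 is not exactly
	 representable in binary floating point, while x / 2.0
	 becomes x * 0.5 whenever optimizing, because the reciprocal
	 of 2.0 is exact.  */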
5529 if (TREE_CODE (arg1) == REAL_CST)
5530 {
5531 if (flag_unsafe_math_optimizations
5532 && 0 != (tem = const_binop (code, build_real (type, dconst1),
5533 arg1, 0)))
5534 return fold (build (MULT_EXPR, type, arg0, tem));
5535 /* Find the reciprocal if optimizing and the result is exact. */
5536 else if (optimize)
5537 {
5538 REAL_VALUE_TYPE r;
5539 r = TREE_REAL_CST (arg1);
	      if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
5541 {
5542 tem = build_real (type, r);
5543 return fold (build (MULT_EXPR, type, arg0, tem));
5544 }
5545 }
5546 }
5547 /* Convert A/B/C to A/(B*C). */
5548 if (flag_unsafe_math_optimizations
5549 && TREE_CODE (arg0) == RDIV_EXPR)
5550 {
5551 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
5552 build (MULT_EXPR, type, TREE_OPERAND (arg0, 1),
5553 arg1)));
5554 }
5555 /* Convert A/(B/C) to (A/B)*C. */
5556 if (flag_unsafe_math_optimizations
5557 && TREE_CODE (arg1) == RDIV_EXPR)
5558 {
5559 return fold (build (MULT_EXPR, type,
5560 build (RDIV_EXPR, type, arg0,
5561 TREE_OPERAND (arg1, 0)),
5562 TREE_OPERAND (arg1, 1)));
5563 }
5564 goto binary;
5565
5566 case TRUNC_DIV_EXPR:
5567 case ROUND_DIV_EXPR:
5568 case FLOOR_DIV_EXPR:
5569 case CEIL_DIV_EXPR:
5570 case EXACT_DIV_EXPR:
5571 if (integer_onep (arg1))
5572 return non_lvalue (convert (type, arg0));
5573 if (integer_zerop (arg1))
5574 return t;
5575
5576 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
5577 operation, EXACT_DIV_EXPR.
5578
5579 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
	 At one time the others generated faster code, but it's not clear
	 whether they still do after the last round of changes to the DIV
	 code in expmed.c.  */
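      /* For example, a CEIL_DIV_EXPR of i * 4 by 4 is provably exact,
	 so it becomes an EXACT_DIV_EXPR, which can be expanded as a
	 simple shift.  */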
5582 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
5583 && multiple_of_p (type, arg0, arg1))
5584 return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));
5585
5586 if (TREE_CODE (arg1) == INTEGER_CST
5587 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
5588 code, NULL_TREE)))
5589 return convert (type, tem);
5590
5591 goto binary;
5592
5593 case CEIL_MOD_EXPR:
5594 case FLOOR_MOD_EXPR:
5595 case ROUND_MOD_EXPR:
5596 case TRUNC_MOD_EXPR:
5597 if (integer_onep (arg1))
5598 return omit_one_operand (type, integer_zero_node, arg0);
5599 if (integer_zerop (arg1))
5600 return t;
5601
5602 if (TREE_CODE (arg1) == INTEGER_CST
5603 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
5604 code, NULL_TREE)))
5605 return convert (type, tem);
5606
5607 goto binary;
5608
5609 case LSHIFT_EXPR:
5610 case RSHIFT_EXPR:
5611 case LROTATE_EXPR:
5612 case RROTATE_EXPR:
5613 if (integer_zerop (arg1))
5614 return non_lvalue (convert (type, arg0));
      /* Since a negative shift count is not well-defined,
	 don't try to compute it in the compiler.  */
5617 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
5618 return t;
5619 /* Rewrite an LROTATE_EXPR by a constant into an
5620 RROTATE_EXPR by a new constant. */
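      /* For example, a rotate left by 3 of a 32-bit value becomes a
	 rotate right by 29, so only one canonical form of rotate
	 reaches the code below.  */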
5621 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
5622 {
5623 TREE_SET_CODE (t, RROTATE_EXPR);
5624 code = RROTATE_EXPR;
5625 TREE_OPERAND (t, 1) = arg1
5626 = const_binop
5627 (MINUS_EXPR,
5628 convert (TREE_TYPE (arg1),
5629 build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0)),
5630 arg1, 0);
5631 if (tree_int_cst_sgn (arg1) < 0)
5632 return t;
5633 }
5634
5635 /* If we have a rotate of a bit operation with the rotate count and
5636 the second operand of the bit operation both constant,
5637 permute the two operations. */
5638 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
5639 && (TREE_CODE (arg0) == BIT_AND_EXPR
5640 || TREE_CODE (arg0) == BIT_ANDTC_EXPR
5641 || TREE_CODE (arg0) == BIT_IOR_EXPR
5642 || TREE_CODE (arg0) == BIT_XOR_EXPR)
5643 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
5644 return fold (build (TREE_CODE (arg0), type,
5645 fold (build (code, type,
5646 TREE_OPERAND (arg0, 0), arg1)),
5647 fold (build (code, type,
5648 TREE_OPERAND (arg0, 1), arg1))));
5649
5650 /* Two consecutive rotates adding up to the width of the mode can
5651 be ignored. */
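      /* For example, rotating a 32-bit x right by 10 and then right
	 by 22 moves every bit 32 positions, i.e. back to where it
	 started, so the result is just x.  */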
5652 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
5653 && TREE_CODE (arg0) == RROTATE_EXPR
5654 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5655 && TREE_INT_CST_HIGH (arg1) == 0
5656 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
5657 && ((TREE_INT_CST_LOW (arg1)
5658 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
5659 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
5660 return TREE_OPERAND (arg0, 0);
5661
5662 goto binary;
5663
5664 case MIN_EXPR:
5665 if (operand_equal_p (arg0, arg1, 0))
5666 return omit_one_operand (type, arg0, arg1);
5667 if (INTEGRAL_TYPE_P (type)
5668 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
5669 return omit_one_operand (type, arg1, arg0);
5670 goto associate;
5671
5672 case MAX_EXPR:
5673 if (operand_equal_p (arg0, arg1, 0))
5674 return omit_one_operand (type, arg0, arg1);
5675 if (INTEGRAL_TYPE_P (type)
5676 && TYPE_MAX_VALUE (type)
5677 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
5678 return omit_one_operand (type, arg1, arg0);
5679 goto associate;
5680
5681 case TRUTH_NOT_EXPR:
5682 /* Note that the operand of this must be an int
5683 and its values must be 0 or 1.
5684 ("true" is a fixed value perhaps depending on the language,
5685 but we don't handle values other than 1 correctly yet.) */
5686 tem = invert_truthvalue (arg0);
5687 /* Avoid infinite recursion. */
5688 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
5689 return t;
5690 return convert (type, tem);
5691
5692 case TRUTH_ANDIF_EXPR:
5693 /* Note that the operands of this must be ints
5694 and their values must be 0 or 1.
5695 ("true" is a fixed value perhaps depending on the language.) */
5696 /* If first arg is constant zero, return it. */
5697 if (integer_zerop (arg0))
5698 return convert (type, arg0);
5699 case TRUTH_AND_EXPR:
5700 /* If either arg is constant true, drop it. */
5701 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
5702 return non_lvalue (convert (type, arg1));
5703 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
5704 /* Preserve sequence points. */
5705 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
5706 return non_lvalue (convert (type, arg0));
5707 /* If second arg is constant zero, result is zero, but first arg
5708 must be evaluated. */
5709 if (integer_zerop (arg1))
5710 return omit_one_operand (type, arg1, arg0);
5711 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
5712 case will be handled here. */
5713 if (integer_zerop (arg0))
5714 return omit_one_operand (type, arg0, arg1);
5715
5716 truth_andor:
5717 /* We only do these simplifications if we are optimizing. */
5718 if (!optimize)
5719 return t;
5720
5721 /* Check for things like (A || B) && (A || C). We can convert this
5722 to A || (B && C). Note that either operator can be any of the four
5723 truth and/or operations and the transformation will still be
5724 valid. Also note that we only care about order for the
5725 ANDIF and ORIF operators. If B contains side effects, this
5726 might change the truth-value of A. */
5727 if (TREE_CODE (arg0) == TREE_CODE (arg1)
5728 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
5729 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
5730 || TREE_CODE (arg0) == TRUTH_AND_EXPR
5731 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
5732 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
5733 {
5734 tree a00 = TREE_OPERAND (arg0, 0);
5735 tree a01 = TREE_OPERAND (arg0, 1);
5736 tree a10 = TREE_OPERAND (arg1, 0);
5737 tree a11 = TREE_OPERAND (arg1, 1);
5738 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
5739 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
5740 && (code == TRUTH_AND_EXPR
5741 || code == TRUTH_OR_EXPR));
5742
5743 if (operand_equal_p (a00, a10, 0))
5744 return fold (build (TREE_CODE (arg0), type, a00,
5745 fold (build (code, type, a01, a11))));
5746 else if (commutative && operand_equal_p (a00, a11, 0))
5747 return fold (build (TREE_CODE (arg0), type, a00,
5748 fold (build (code, type, a01, a10))));
5749 else if (commutative && operand_equal_p (a01, a10, 0))
5750 return fold (build (TREE_CODE (arg0), type, a01,
5751 fold (build (code, type, a00, a11))));
5752
	  /* This case is tricky because we must either have commutative
	     operators or else A10 must not have side-effects.  */
5755
5756 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
5757 && operand_equal_p (a01, a11, 0))
5758 return fold (build (TREE_CODE (arg0), type,
5759 fold (build (code, type, a00, a10)),
5760 a01));
5761 }
5762
5763 /* See if we can build a range comparison. */
5764 if (0 != (tem = fold_range_test (t)))
5765 return tem;
5766
5767 /* Check for the possibility of merging component references. If our
5768 lhs is another similar operation, try to merge its rhs with our
5769 rhs. Then try to merge our lhs and rhs. */
5770 if (TREE_CODE (arg0) == code
5771 && 0 != (tem = fold_truthop (code, type,
5772 TREE_OPERAND (arg0, 1), arg1)))
5773 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
5774
5775 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
5776 return tem;
5777
5778 return t;
5779
5780 case TRUTH_ORIF_EXPR:
5781 /* Note that the operands of this must be ints
	 and their values must be 0 or 1.
5783 ("true" is a fixed value perhaps depending on the language.) */
5784 /* If first arg is constant true, return it. */
5785 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
5786 return convert (type, arg0);
5787 case TRUTH_OR_EXPR:
5788 /* If either arg is constant zero, drop it. */
5789 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
5790 return non_lvalue (convert (type, arg1));
5791 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
5792 /* Preserve sequence points. */
5793 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
5794 return non_lvalue (convert (type, arg0));
5795 /* If second arg is constant true, result is true, but we must
5796 evaluate first arg. */
5797 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
5798 return omit_one_operand (type, arg1, arg0);
5799 /* Likewise for first arg, but note this only occurs here for
5800 TRUTH_OR_EXPR. */
5801 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
5802 return omit_one_operand (type, arg0, arg1);
5803 goto truth_andor;
5804
5805 case TRUTH_XOR_EXPR:
5806 /* If either arg is constant zero, drop it. */
5807 if (integer_zerop (arg0))
5808 return non_lvalue (convert (type, arg1));
5809 if (integer_zerop (arg1))
5810 return non_lvalue (convert (type, arg0));
5811 /* If either arg is constant true, this is a logical inversion. */
5812 if (integer_onep (arg0))
5813 return non_lvalue (convert (type, invert_truthvalue (arg1)));
5814 if (integer_onep (arg1))
5815 return non_lvalue (convert (type, invert_truthvalue (arg0)));
5816 return t;
5817
5818 case EQ_EXPR:
5819 case NE_EXPR:
5820 case LT_EXPR:
5821 case GT_EXPR:
5822 case LE_EXPR:
5823 case GE_EXPR:
5824 /* If one arg is a real or integer constant, put it last. */
      if ((TREE_CODE (arg0) == INTEGER_CST
	   && TREE_CODE (arg1) != INTEGER_CST)
	  || (TREE_CODE (arg0) == REAL_CST
	      && TREE_CODE (arg1) != REAL_CST))
5829 {
5830 TREE_OPERAND (t, 0) = arg1;
5831 TREE_OPERAND (t, 1) = arg0;
5832 arg0 = TREE_OPERAND (t, 0);
5833 arg1 = TREE_OPERAND (t, 1);
5834 code = swap_tree_comparison (code);
5835 TREE_SET_CODE (t, code);
5836 }
5837
5838 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
5839 {
5840 /* (-a) CMP (-b) -> b CMP a */
5841 if (TREE_CODE (arg0) == NEGATE_EXPR
5842 && TREE_CODE (arg1) == NEGATE_EXPR)
5843 return fold (build (code, type, TREE_OPERAND (arg1, 0),
5844 TREE_OPERAND (arg0, 0)));
5845 /* (-a) CMP CST -> a swap(CMP) (-CST) */
5846 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == REAL_CST)
5847 return
5848 fold (build
5849 (swap_tree_comparison (code), type,
5850 TREE_OPERAND (arg0, 0),
5851 build_real (TREE_TYPE (arg1),
5852 REAL_VALUE_NEGATE (TREE_REAL_CST (arg1)))));
5853 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
5854 /* a CMP (-0) -> a CMP 0 */
5855 if (TREE_CODE (arg1) == REAL_CST
5856 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (arg1)))
5857 return fold (build (code, type, arg0,
5858 build_real (TREE_TYPE (arg1), dconst0)));
5859
5860 /* If this is a comparison of a real constant with a PLUS_EXPR
5861 or a MINUS_EXPR of a real constant, we can convert it into a
5862 comparison with a revised real constant as long as no overflow
5863 occurs when unsafe_math_optimizations are enabled. */
5864 if (flag_unsafe_math_optimizations
5865 && TREE_CODE (arg1) == REAL_CST
5866 && (TREE_CODE (arg0) == PLUS_EXPR
5867 || TREE_CODE (arg0) == MINUS_EXPR)
5868 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
5869 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
5870 ? MINUS_EXPR : PLUS_EXPR,
5871 arg1, TREE_OPERAND (arg0, 1), 0))
5872 && ! TREE_CONSTANT_OVERFLOW (tem))
5873 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
5874 }
5875
5876 /* Convert foo++ == CONST into ++foo == CONST + INCR.
5877 First, see if one arg is constant; find the constant arg
5878 and the other one. */
5879 {
5880 tree constop = 0, varop = NULL_TREE;
5881 int constopnum = -1;
5882
5883 if (TREE_CONSTANT (arg1))
5884 constopnum = 1, constop = arg1, varop = arg0;
5885 if (TREE_CONSTANT (arg0))
5886 constopnum = 0, constop = arg0, varop = arg1;
5887
5888 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
5889 {
5890 /* This optimization is invalid for ordered comparisons
5891 if CONST+INCR overflows or if foo+incr might overflow.
5892 This optimization is invalid for floating point due to rounding.
5893 For pointer types we assume overflow doesn't happen. */
5894 if (POINTER_TYPE_P (TREE_TYPE (varop))
5895 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
5896 && (code == EQ_EXPR || code == NE_EXPR)))
5897 {
5898 tree newconst
5899 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
5900 constop, TREE_OPERAND (varop, 1)));
5901
5902 /* Do not overwrite the current varop to be a preincrement,
5903 create a new node so that we won't confuse our caller who
5904 might create trees and throw them away, reusing the
5905 arguments that they passed to build. This shows up in
5906 the THEN or ELSE parts of ?: being postincrements. */
5907 varop = build (PREINCREMENT_EXPR, TREE_TYPE (varop),
5908 TREE_OPERAND (varop, 0),
5909 TREE_OPERAND (varop, 1));
5910
5911 /* If VAROP is a reference to a bitfield, we must mask
5912 the constant by the width of the field. */
5913 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
		      && DECL_BIT_FIELD (TREE_OPERAND
5915 (TREE_OPERAND (varop, 0), 1)))
5916 {
5917 int size
5918 = TREE_INT_CST_LOW (DECL_SIZE
5919 (TREE_OPERAND
5920 (TREE_OPERAND (varop, 0), 1)));
5921 tree mask, unsigned_type;
5922 unsigned int precision;
5923 tree folded_compare;
5924
5925 /* First check whether the comparison would come out
5926 always the same. If we don't do that we would
5927 change the meaning with the masking. */
5928 if (constopnum == 0)
5929 folded_compare = fold (build (code, type, constop,
5930 TREE_OPERAND (varop, 0)));
5931 else
5932 folded_compare = fold (build (code, type,
5933 TREE_OPERAND (varop, 0),
5934 constop));
5935 if (integer_zerop (folded_compare)
5936 || integer_onep (folded_compare))
5937 return omit_one_operand (type, folded_compare, varop);
5938
		      unsigned_type = (*lang_hooks.types.type_for_size) (size, 1);
5940 precision = TYPE_PRECISION (unsigned_type);
5941 mask = build_int_2 (~0, ~0);
5942 TREE_TYPE (mask) = unsigned_type;
5943 force_fit_type (mask, 0);
5944 mask = const_binop (RSHIFT_EXPR, mask,
5945 size_int (precision - size), 0);
5946 newconst = fold (build (BIT_AND_EXPR,
5947 TREE_TYPE (varop), newconst,
5948 convert (TREE_TYPE (varop),
5949 mask)));
5950 }
5951
5952 t = build (code, type,
5953 (constopnum == 0) ? newconst : varop,
5954 (constopnum == 1) ? newconst : varop);
5955 return t;
5956 }
5957 }
5958 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
5959 {
5960 if (POINTER_TYPE_P (TREE_TYPE (varop))
5961 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
5962 && (code == EQ_EXPR || code == NE_EXPR)))
5963 {
5964 tree newconst
5965 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
5966 constop, TREE_OPERAND (varop, 1)));
5967
5968 /* Do not overwrite the current varop to be a predecrement,
5969 create a new node so that we won't confuse our caller who
5970 might create trees and throw them away, reusing the
5971 arguments that they passed to build. This shows up in
5972 the THEN or ELSE parts of ?: being postdecrements. */
5973 varop = build (PREDECREMENT_EXPR, TREE_TYPE (varop),
5974 TREE_OPERAND (varop, 0),
5975 TREE_OPERAND (varop, 1));
5976
5977 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
		      && DECL_BIT_FIELD (TREE_OPERAND
5979 (TREE_OPERAND (varop, 0), 1)))
5980 {
5981 int size
5982 = TREE_INT_CST_LOW (DECL_SIZE
5983 (TREE_OPERAND
5984 (TREE_OPERAND (varop, 0), 1)));
5985 tree mask, unsigned_type;
5986 unsigned int precision;
5987 tree folded_compare;
5988
5989 if (constopnum == 0)
5990 folded_compare = fold (build (code, type, constop,
5991 TREE_OPERAND (varop, 0)));
5992 else
5993 folded_compare = fold (build (code, type,
5994 TREE_OPERAND (varop, 0),
5995 constop));
5996 if (integer_zerop (folded_compare)
5997 || integer_onep (folded_compare))
5998 return omit_one_operand (type, folded_compare, varop);
5999
		      unsigned_type = (*lang_hooks.types.type_for_size) (size, 1);
6001 precision = TYPE_PRECISION (unsigned_type);
6002 mask = build_int_2 (~0, ~0);
		      /* As in the post-increment case above, the mask
			 must be built in the unsigned type.  */
		      TREE_TYPE (mask) = unsigned_type;
6004 force_fit_type (mask, 0);
6005 mask = const_binop (RSHIFT_EXPR, mask,
6006 size_int (precision - size), 0);
6007 newconst = fold (build (BIT_AND_EXPR,
6008 TREE_TYPE (varop), newconst,
6009 convert (TREE_TYPE (varop),
6010 mask)));
6011 }
6012
6013 t = build (code, type,
6014 (constopnum == 0) ? newconst : varop,
6015 (constopnum == 1) ? newconst : varop);
6016 return t;
6017 }
6018 }
6019 }
6020
6021 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
6022 This transformation affects the cases which are handled in later
6023 optimizations involving comparisons with non-negative constants. */
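      /* For example, x >= 1 becomes x > 0 and x < 1 becomes x <= 0,
	 so the code below only has to recognize comparisons against
	 zero and against the extreme values of the type.  */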
6024 if (TREE_CODE (arg1) == INTEGER_CST
6025 && TREE_CODE (arg0) != INTEGER_CST
6026 && tree_int_cst_sgn (arg1) > 0)
6027 {
6028 switch (code)
6029 {
6030 case GE_EXPR:
6031 code = GT_EXPR;
6032 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6033 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6034 break;
6035
6036 case LT_EXPR:
6037 code = LE_EXPR;
6038 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6039 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6040 break;
6041
6042 default:
6043 break;
6044 }
6045 }
6046
6047 /* Comparisons with the highest or lowest possible integer of
6048 the specified size will have known values. */
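      /* For example, if both operands have unsigned char type,
	 x > 255 is known to be false and x <= 255 known to be true,
	 while x > 254 is rewritten as x == 255.  */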
6049 {
6050 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
6051
6052 if (TREE_CODE (arg1) == INTEGER_CST
6053 && ! TREE_CONSTANT_OVERFLOW (arg1)
6054 && width <= HOST_BITS_PER_WIDE_INT
6055 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
6056 || POINTER_TYPE_P (TREE_TYPE (arg1))))
6057 {
6058 unsigned HOST_WIDE_INT signed_max;
6059 unsigned HOST_WIDE_INT max, min;
6060
6061 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
6062
6063 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
6064 {
6065 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
6066 min = 0;
6067 }
6068 else
6069 {
6070 max = signed_max;
6071 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
6072 }
6073
6074 if (TREE_INT_CST_HIGH (arg1) == 0
6075 && TREE_INT_CST_LOW (arg1) == max)
6076 switch (code)
6077 {
6078 case GT_EXPR:
6079 return omit_one_operand (type,
6080 convert (type, integer_zero_node),
6081 arg0);
6082 case GE_EXPR:
6083 code = EQ_EXPR;
6084 TREE_SET_CODE (t, EQ_EXPR);
6085 break;
6086 case LE_EXPR:
6087 return omit_one_operand (type,
6088 convert (type, integer_one_node),
6089 arg0);
6090 case LT_EXPR:
6091 code = NE_EXPR;
6092 TREE_SET_CODE (t, NE_EXPR);
6093 break;
6094
6095 /* The GE_EXPR and LT_EXPR cases above are not normally
6096 reached because of previous transformations. */
6097
6098 default:
6099 break;
6100 }
6101 else if (TREE_INT_CST_HIGH (arg1) == 0
6102 && TREE_INT_CST_LOW (arg1) == max - 1)
6103 switch (code)
6104 {
6105 case GT_EXPR:
6106 code = EQ_EXPR;
6107 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
6108 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6109 break;
6110 case LE_EXPR:
6111 code = NE_EXPR;
6112 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
6113 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6114 break;
6115 default:
6116 break;
6117 }
6118 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
6119 && TREE_INT_CST_LOW (arg1) == min)
6120 switch (code)
6121 {
6122 case LT_EXPR:
6123 return omit_one_operand (type,
6124 convert (type, integer_zero_node),
6125 arg0);
6126 case LE_EXPR:
6127 code = EQ_EXPR;
6128 TREE_SET_CODE (t, EQ_EXPR);
6129 break;
6130
6131 case GE_EXPR:
6132 return omit_one_operand (type,
6133 convert (type, integer_one_node),
6134 arg0);
6135 case GT_EXPR:
6136 code = NE_EXPR;
6137 TREE_SET_CODE (t, NE_EXPR);
6138 break;
6139
6140 default:
6141 break;
6142 }
6143 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
6144 && TREE_INT_CST_LOW (arg1) == min + 1)
6145 switch (code)
6146 {
6147 case GE_EXPR:
6148 code = NE_EXPR;
6149 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6150 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6151 break;
6152 case LT_EXPR:
6153 code = EQ_EXPR;
6154 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6155 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6156 break;
6157 default:
6158 break;
6159 }
6160
6161 else if (TREE_INT_CST_HIGH (arg1) == 0
6162 && TREE_INT_CST_LOW (arg1) == signed_max
6163 && TREE_UNSIGNED (TREE_TYPE (arg1))
6164 /* signed_type does not work on pointer types. */
6165 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
6166 {
		/* The following case also applies to X < signed_max+1
		   and X >= signed_max+1 because of previous
		   transformations.  */
6169 if (code == LE_EXPR || code == GT_EXPR)
6170 {
6171 tree st0, st1;
6172 st0 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg0));
6173 st1 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg1));
6174 return fold
6175 (build (code == LE_EXPR ? GE_EXPR: LT_EXPR,
6176 type, convert (st0, arg0),
6177 convert (st1, integer_zero_node)));
6178 }
6179 }
6180 }
6181 }
6182
6183 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
6184 a MINUS_EXPR of a constant, we can convert it into a comparison with
6185 a revised constant as long as no overflow occurs. */
6186 if ((code == EQ_EXPR || code == NE_EXPR)
6187 && TREE_CODE (arg1) == INTEGER_CST
6188 && (TREE_CODE (arg0) == PLUS_EXPR
6189 || TREE_CODE (arg0) == MINUS_EXPR)
6190 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6191 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
6192 ? MINUS_EXPR : PLUS_EXPR,
6193 arg1, TREE_OPERAND (arg0, 1), 0))
6194 && ! TREE_CONSTANT_OVERFLOW (tem))
6195 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6196
6197 /* Similarly for a NEGATE_EXPR. */
6198 else if ((code == EQ_EXPR || code == NE_EXPR)
6199 && TREE_CODE (arg0) == NEGATE_EXPR
6200 && TREE_CODE (arg1) == INTEGER_CST
6201 && 0 != (tem = negate_expr (arg1))
6202 && TREE_CODE (tem) == INTEGER_CST
6203 && ! TREE_CONSTANT_OVERFLOW (tem))
6204 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6205
6206 /* If we have X - Y == 0, we can convert that to X == Y and similarly
6207 for !=. Don't do this for ordered comparisons due to overflow. */
6208 else if ((code == NE_EXPR || code == EQ_EXPR)
6209 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
6210 return fold (build (code, type,
6211 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
6212
6213 /* If we are widening one operand of an integer comparison,
6214 see if the other operand is similarly being widened. Perhaps we
6215 can do the comparison in the narrower type. */
6216 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
6217 && TREE_CODE (arg0) == NOP_EXPR
6218 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
6219 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
6220 && (TREE_TYPE (t1) == TREE_TYPE (tem)
6221 || (TREE_CODE (t1) == INTEGER_CST
6222 && int_fits_type_p (t1, TREE_TYPE (tem)))))
6223 return fold (build (code, type, tem, convert (TREE_TYPE (tem), t1)));
6224
6225 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
6226 constant, we can simplify it. */
6227 else if (TREE_CODE (arg1) == INTEGER_CST
6228 && (TREE_CODE (arg0) == MIN_EXPR
6229 || TREE_CODE (arg0) == MAX_EXPR)
6230 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
6231 return optimize_minmax_comparison (t);
6232
6233 /* If we are comparing an ABS_EXPR with a constant, we can
6234 convert all the cases into explicit comparisons, but they may
6235 well not be faster than doing the ABS and one comparison.
6236 But ABS (X) <= C is a range comparison, which becomes a subtraction
6237 and a comparison, and is probably faster. */
6238 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6239 && TREE_CODE (arg0) == ABS_EXPR
6240 && ! TREE_SIDE_EFFECTS (arg0)
6241 && (0 != (tem = negate_expr (arg1)))
6242 && TREE_CODE (tem) == INTEGER_CST
6243 && ! TREE_CONSTANT_OVERFLOW (tem))
6244 return fold (build (TRUTH_ANDIF_EXPR, type,
6245 build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
6246 build (LE_EXPR, type,
6247 TREE_OPERAND (arg0, 0), arg1)));
6248
6249 /* If this is an EQ or NE comparison with zero and ARG0 is
6250 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
6251 two operations, but the latter can be done in one less insn
6252 on machines that have only two-operand insns or on which a
6253 constant cannot be the first operand. */
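      /* For example, ((1 << n) & x) == 0 becomes ((x >> n) & 1) == 0;
	 both forms test bit n of x.  */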
6254 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
6255 && TREE_CODE (arg0) == BIT_AND_EXPR)
6256 {
6257 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
6258 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
6259 return
6260 fold (build (code, type,
6261 build (BIT_AND_EXPR, TREE_TYPE (arg0),
6262 build (RSHIFT_EXPR,
6263 TREE_TYPE (TREE_OPERAND (arg0, 0)),
6264 TREE_OPERAND (arg0, 1),
6265 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
6266 convert (TREE_TYPE (arg0),
6267 integer_one_node)),
6268 arg1));
6269 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
6270 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
6271 return
6272 fold (build (code, type,
6273 build (BIT_AND_EXPR, TREE_TYPE (arg0),
6274 build (RSHIFT_EXPR,
6275 TREE_TYPE (TREE_OPERAND (arg0, 1)),
6276 TREE_OPERAND (arg0, 0),
6277 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
6278 convert (TREE_TYPE (arg0),
6279 integer_one_node)),
6280 arg1));
6281 }
6282
6283 /* If this is an NE or EQ comparison of zero against the result of a
6284 signed MOD operation whose second operand is a power of 2, make
6285 the MOD operation unsigned since it is simpler and equivalent. */
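      /* For example, with int x, x % 16 == 0 becomes
	 (unsigned) x % 16 == 0: since only the test against zero
	 matters, the signed and unsigned remainders are
	 interchangeable, and the unsigned form is a simple mask.  */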
6286 if ((code == NE_EXPR || code == EQ_EXPR)
6287 && integer_zerop (arg1)
6288 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
6289 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
6290 || TREE_CODE (arg0) == CEIL_MOD_EXPR
6291 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
6292 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
6293 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6294 {
6295 tree newtype = (*lang_hooks.types.unsigned_type) (TREE_TYPE (arg0));
6296 tree newmod = build (TREE_CODE (arg0), newtype,
6297 convert (newtype, TREE_OPERAND (arg0, 0)),
6298 convert (newtype, TREE_OPERAND (arg0, 1)));
6299
6300 return build (code, type, newmod, convert (newtype, arg1));
6301 }
6302
6303 /* If this is an NE comparison of zero with an AND of one, remove the
6304 comparison since the AND will give the correct value. */
6305 if (code == NE_EXPR && integer_zerop (arg1)
6306 && TREE_CODE (arg0) == BIT_AND_EXPR
6307 && integer_onep (TREE_OPERAND (arg0, 1)))
6308 return convert (type, arg0);
6309
6310 /* If we have (A & C) == C where C is a power of 2, convert this into
6311 (A & C) != 0. Similarly for NE_EXPR. */
6312 if ((code == EQ_EXPR || code == NE_EXPR)
6313 && TREE_CODE (arg0) == BIT_AND_EXPR
6314 && integer_pow2p (TREE_OPERAND (arg0, 1))
6315 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
6316 return fold (build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
6317 arg0, integer_zero_node));
6318
6319 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6320 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
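      /* For example, with a 32-bit int x, (x & 0x80000000) != 0
	 tests exactly the sign bit and so becomes x < 0.  */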
6321 if ((code == EQ_EXPR || code == NE_EXPR)
6322 && TREE_CODE (arg0) == BIT_AND_EXPR
6323 && integer_zerop (arg1))
6324 {
6325 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0),
6326 TREE_OPERAND (arg0, 1));
6327 if (arg00 != NULL_TREE)
6328 {
6329 tree stype = (*lang_hooks.types.signed_type) (TREE_TYPE (arg00));
6330 return fold (build (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
6331 convert (stype, arg00),
6332 convert (stype, integer_zero_node)));
6333 }
6334 }
6335
6336 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
6337 and similarly for >= into !=. */
6338 if ((code == LT_EXPR || code == GE_EXPR)
6339 && TREE_UNSIGNED (TREE_TYPE (arg0))
6340 && TREE_CODE (arg1) == LSHIFT_EXPR
6341 && integer_onep (TREE_OPERAND (arg1, 0)))
6342 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
6343 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
6344 TREE_OPERAND (arg1, 1)),
6345 convert (TREE_TYPE (arg0), integer_zero_node));
6346
6347 else if ((code == LT_EXPR || code == GE_EXPR)
6348 && TREE_UNSIGNED (TREE_TYPE (arg0))
6349 && (TREE_CODE (arg1) == NOP_EXPR
6350 || TREE_CODE (arg1) == CONVERT_EXPR)
6351 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
6352 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
6353 return
6354 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
6355 convert (TREE_TYPE (arg0),
6356 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
6357 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
6358 convert (TREE_TYPE (arg0), integer_zero_node));
6359
6360 /* Simplify comparison of something with itself. (For IEEE
6361 floating-point, we can only do some of these simplifications.) */
6362 if (operand_equal_p (arg0, arg1, 0))
6363 {
6364 switch (code)
6365 {
6366 case EQ_EXPR:
6367 case GE_EXPR:
6368 case LE_EXPR:
6369 if (! FLOAT_TYPE_P (TREE_TYPE (arg0)))
6370 return constant_boolean_node (1, type);
6371 code = EQ_EXPR;
6372 TREE_SET_CODE (t, code);
6373 break;
6374
6375 case NE_EXPR:
6376 /* For NE, we can only do this simplification if integer. */
6377 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
6378 break;
6379 /* ... fall through ... */
6380 case GT_EXPR:
6381 case LT_EXPR:
6382 return constant_boolean_node (0, type);
6383 default:
6384 abort ();
6385 }
6386 }
6387
6388 /* If we are comparing an expression that just has comparisons
6389 of two integer values, arithmetic expressions of those comparisons,
6390 and constants, we can simplify it. There are only three cases
6391 to check: the two values can either be equal, the first can be
6392 greater, or the second can be greater. Fold the expression for
6393 those three values. Since each value must be 0 or 1, we have
6394 eight possibilities, each of which corresponds to the constant 0
6395 or 1 or one of the six possible comparisons.
6396
6397 This handles common cases like (a > b) == 0 but also handles
6398 expressions like ((x > y) - (y > x)) > 0, which supposedly
6399 occur in macroized code. */
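      /* For example, for (a > b) == 0 the three substitutions below
	 give high_result = 0, equal_result = 1 and low_result = 1,
	 i.e. the mask 011, so the whole expression folds to a <= b.  */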
6400
6401 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
6402 {
6403 tree cval1 = 0, cval2 = 0;
6404 int save_p = 0;
6405
6406 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
6407 /* Don't handle degenerate cases here; they should already
6408 have been handled anyway. */
6409 && cval1 != 0 && cval2 != 0
6410 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
6411 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
6412 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
6413 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
6414 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
6415 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
6416 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
6417 {
6418 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
6419 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
6420
6421 /* We can't just pass T to eval_subst in case cval1 or cval2
6422 was the same as ARG1. */
6423
6424 tree high_result
6425 = fold (build (code, type,
6426 eval_subst (arg0, cval1, maxval, cval2, minval),
6427 arg1));
6428 tree equal_result
6429 = fold (build (code, type,
6430 eval_subst (arg0, cval1, maxval, cval2, maxval),
6431 arg1));
6432 tree low_result
6433 = fold (build (code, type,
6434 eval_subst (arg0, cval1, minval, cval2, maxval),
6435 arg1));
6436
6437 /* All three of these results should be 0 or 1. Confirm they
6438 are. Then use those values to select the proper code
6439 to use. */
6440
6441 if ((integer_zerop (high_result)
6442 || integer_onep (high_result))
6443 && (integer_zerop (equal_result)
6444 || integer_onep (equal_result))
6445 && (integer_zerop (low_result)
6446 || integer_onep (low_result)))
6447 {
6448 /* Make a 3-bit mask with the high-order bit being the
		 value for `>', the next for `=', and the low for `<'.  */
6450 switch ((integer_onep (high_result) * 4)
6451 + (integer_onep (equal_result) * 2)
6452 + integer_onep (low_result))
6453 {
6454 case 0:
6455 /* Always false. */
6456 return omit_one_operand (type, integer_zero_node, arg0);
6457 case 1:
6458 code = LT_EXPR;
6459 break;
6460 case 2:
6461 code = EQ_EXPR;
6462 break;
6463 case 3:
6464 code = LE_EXPR;
6465 break;
6466 case 4:
6467 code = GT_EXPR;
6468 break;
6469 case 5:
6470 code = NE_EXPR;
6471 break;
6472 case 6:
6473 code = GE_EXPR;
6474 break;
6475 case 7:
6476 /* Always true. */
6477 return omit_one_operand (type, integer_one_node, arg0);
6478 }
6479
6480 t = build (code, type, cval1, cval2);
6481 if (save_p)
6482 return save_expr (t);
6483 else
6484 return fold (t);
6485 }
6486 }
6487 }
6488
6489 /* If this is a comparison of a field, we may be able to simplify it. */
6490 if ((TREE_CODE (arg0) == COMPONENT_REF
6491 || TREE_CODE (arg0) == BIT_FIELD_REF)
6492 && (code == EQ_EXPR || code == NE_EXPR)
6493 /* Handle the constant case even without -O
6494 to make sure the warnings are given. */
6495 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
6496 {
6497 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
6498 return t1 ? t1 : t;
6499 }
6500
6501 /* If this is a comparison of complex values and either or both sides
6502 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
6503 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
6504 This may prevent needless evaluations. */
6505 if ((code == EQ_EXPR || code == NE_EXPR)
6506 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
6507 && (TREE_CODE (arg0) == COMPLEX_EXPR
6508 || TREE_CODE (arg1) == COMPLEX_EXPR
6509 || TREE_CODE (arg0) == COMPLEX_CST
6510 || TREE_CODE (arg1) == COMPLEX_CST))
6511 {
6512 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
6513 tree real0, imag0, real1, imag1;
6514
6515 arg0 = save_expr (arg0);
6516 arg1 = save_expr (arg1);
6517 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
6518 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
6519 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
6520 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
6521
6522 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
6523 : TRUTH_ORIF_EXPR),
6524 type,
6525 fold (build (code, type, real0, real1)),
6526 fold (build (code, type, imag0, imag1))));
6527 }
6528
6529 /* Optimize comparisons of strlen vs zero to a compare of the
6530 first character of the string vs zero. To wit,
6531 strlen(ptr) == 0 => *ptr == 0
6532 strlen(ptr) != 0 => *ptr != 0
6533 Other cases should reduce to one of these two (or a constant)
6534 due to the return value of strlen being unsigned. */
6535 if ((code == EQ_EXPR || code == NE_EXPR)
6536 && integer_zerop (arg1)
6537 && TREE_CODE (arg0) == CALL_EXPR
6538 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR)
6539 {
6540 tree fndecl = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6541 tree arglist;
6542
6543 if (TREE_CODE (fndecl) == FUNCTION_DECL
6544 && DECL_BUILT_IN (fndecl)
6545 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
6546 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
6547 && (arglist = TREE_OPERAND (arg0, 1))
6548 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
6549 && ! TREE_CHAIN (arglist))
6550 return fold (build (code, type,
6551 build1 (INDIRECT_REF, char_type_node,
				    TREE_VALUE (arglist)),
6553 integer_zero_node));
6554 }
6555
6556 /* From here on, the only cases we handle are when the result is
6557 known to be a constant.
6558
6559 To compute GT, swap the arguments and do LT.
6560 To compute GE, do LT and invert the result.
6561 To compute LE, swap the arguments, do LT and invert the result.
6562 To compute NE, do EQ and invert the result.
6563
6564 Therefore, the code below must handle only EQ and LT. */
6565
6566 if (code == LE_EXPR || code == GT_EXPR)
6567 {
6568 tem = arg0, arg0 = arg1, arg1 = tem;
6569 code = swap_tree_comparison (code);
6570 }
6571
6572 /* Note that it is safe to invert for real values here because we
6573 will check below in the one case that it matters. */
6574
6575 t1 = NULL_TREE;
6576 invert = 0;
6577 if (code == NE_EXPR || code == GE_EXPR)
6578 {
6579 invert = 1;
6580 code = invert_tree_comparison (code);
6581 }
6582
6583 /* Compute a result for LT or EQ if args permit;
6584 otherwise return T. */
6585 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
6586 {
6587 if (code == EQ_EXPR)
6588 t1 = build_int_2 (tree_int_cst_equal (arg0, arg1), 0);
6589 else
6590 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
6591 ? INT_CST_LT_UNSIGNED (arg0, arg1)
6592 : INT_CST_LT (arg0, arg1)),
6593 0);
6594 }
6595
#if 0 /* This is no longer useful, and it breaks some real code.  */
6597 /* Assume a nonexplicit constant cannot equal an explicit one,
6598 since such code would be undefined anyway.
6599 Exception: on sysvr4, using #pragma weak,
6600 a label can come out as 0. */
6601 else if (TREE_CODE (arg1) == INTEGER_CST
6602 && !integer_zerop (arg1)
6603 && TREE_CONSTANT (arg0)
6604 && TREE_CODE (arg0) == ADDR_EXPR
6605 && code == EQ_EXPR)
6606 t1 = build_int_2 (0, 0);
6607 #endif
6608 /* Two real constants can be compared explicitly. */
6609 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
6610 {
6611 /* If either operand is a NaN, the result is false with two
6612 exceptions: First, an NE_EXPR is true on NaNs, but that case
6613 is already handled correctly since we will be inverting the
6614 result for NE_EXPR. Second, if we had inverted a LE_EXPR
6615 or a GE_EXPR into a LT_EXPR, we must return true so that it
6616 will be inverted into false. */
6617
6618 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
6619 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
6620 t1 = build_int_2 (invert && code == LT_EXPR, 0);
6621
6622 else if (code == EQ_EXPR)
6623 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
6624 TREE_REAL_CST (arg1)),
6625 0);
6626 else
6627 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
6628 TREE_REAL_CST (arg1)),
6629 0);
6630 }
6631
6632 if (t1 == NULL_TREE)
6633 return t;
6634
6635 if (invert)
6636 TREE_INT_CST_LOW (t1) ^= 1;
6637
6638 TREE_TYPE (t1) = type;
6639 if (TREE_CODE (type) == BOOLEAN_TYPE)
6640 return (*lang_hooks.truthvalue_conversion) (t1);
6641 return t1;
6642
6643 case COND_EXPR:
6644 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
6645 so all simple results must be passed through pedantic_non_lvalue. */
6646 if (TREE_CODE (arg0) == INTEGER_CST)
6647 return pedantic_non_lvalue
6648 (TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
6649 else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
6650 return pedantic_omit_one_operand (type, arg1, arg0);
6651
6652 /* If the second operand is zero, invert the comparison and swap
6653 the second and third operands. Likewise if the second operand
6654 is constant and the third is not or if the third operand is
6655 equivalent to the first operand of the comparison. */
6656
6657 if (integer_zerop (arg1)
6658 || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
6659 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
6660 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
6661 TREE_OPERAND (t, 2),
6662 TREE_OPERAND (arg0, 1))))
6663 {
6664 /* See if this can be inverted. If it can't, possibly because
6665 it was a floating-point inequality comparison, don't do
6666 anything. */
6667 tem = invert_truthvalue (arg0);
6668
6669 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
6670 {
6671 t = build (code, type, tem,
6672 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
6673 arg0 = tem;
6674 /* arg1 should be the first argument of the new T. */
6675 arg1 = TREE_OPERAND (t, 1);
6676 STRIP_NOPS (arg1);
6677 }
6678 }
6679
6680 /* If we have A op B ? A : C, we may be able to convert this to a
6681 simpler expression, depending on the operation and the values
6682 of B and C. Signed zeros prevent all of these transformations,
6683 for reasons given above each one. */
6684
6685 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
6686 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
6687 arg1, TREE_OPERAND (arg0, 1))
6688 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
6689 {
6690 tree arg2 = TREE_OPERAND (t, 2);
6691 enum tree_code comp_code = TREE_CODE (arg0);
6692
6693 STRIP_NOPS (arg2);
6694
6695 /* If we have A op 0 ? A : -A, consider applying the following
6696 transformations:
6697
6698 A == 0? A : -A same as -A
6699 A != 0? A : -A same as A
6700 A >= 0? A : -A same as abs (A)
6701 A > 0? A : -A same as abs (A)
6702 A <= 0? A : -A same as -abs (A)
6703 A < 0? A : -A same as -abs (A)
6704
6705 None of these transformations work for modes with signed
6706 zeros. If A is +/-0, the first two transformations will
6707 change the sign of the result (from +0 to -0, or vice
6708 versa). The last four will fix the sign of the result,
6709 even though the original expressions could be positive or
6710 negative, depending on the sign of A.
6711
6712 Note that all these transformations are correct if A is
6713 NaN, since the two alternatives (A and -A) are also NaNs. */
6714 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
6715 ? real_zerop (TREE_OPERAND (arg0, 1))
6716 : integer_zerop (TREE_OPERAND (arg0, 1)))
6717 && TREE_CODE (arg2) == NEGATE_EXPR
6718 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
6719 switch (comp_code)
6720 {
6721 case EQ_EXPR:
6722 return
6723 pedantic_non_lvalue
6724 (convert (type,
6725 negate_expr
6726 (convert (TREE_TYPE (TREE_OPERAND (t, 1)),
6727 arg1))));
6728 case NE_EXPR:
6729 return pedantic_non_lvalue (convert (type, arg1));
6730 case GE_EXPR:
6731 case GT_EXPR:
6732 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
6733 arg1 = convert ((*lang_hooks.types.signed_type)
6734 (TREE_TYPE (arg1)), arg1);
6735 return pedantic_non_lvalue
6736 (convert (type, fold (build1 (ABS_EXPR,
6737 TREE_TYPE (arg1), arg1))));
6738 case LE_EXPR:
6739 case LT_EXPR:
6740 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
6741 arg1 = convert ((*lang_hooks.types.signed_type)
6742 (TREE_TYPE (arg1)), arg1);
6743 return pedantic_non_lvalue
6744 (negate_expr (convert (type,
6745 fold (build1 (ABS_EXPR,
6746 TREE_TYPE (arg1),
6747 arg1)))));
6748 default:
6749 abort ();
6750 }
6751
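/* As a concrete instance of the table above: X > 0 ? X : -X folds
   to ABS_EXPR <X>, and X < 0 ? X : -X folds to -ABS_EXPR <X>,
   once signed zeros are known not to matter for X's mode.  */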
6752 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
6753 A == 0 ? A : 0 is always 0 unless A is -0. Note that
6754 both transformations are correct when A is NaN: A != 0
6755 is then true, and A == 0 is false. */
6756
6757 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
6758 {
6759 if (comp_code == NE_EXPR)
6760 return pedantic_non_lvalue (convert (type, arg1));
6761 else if (comp_code == EQ_EXPR)
6762 return pedantic_non_lvalue (convert (type, integer_zero_node));
6763 }
6764
6765 /* Try some transformations of A op B ? A : B.
6766
6767 A == B? A : B same as B
6768 A != B? A : B same as A
6769 A >= B? A : B same as max (A, B)
6770 A > B? A : B same as max (B, A)
6771 A <= B? A : B same as min (A, B)
6772 A < B? A : B same as min (B, A)
6773
6774 As above, these transformations don't work in the presence
6775 of signed zeros. For example, if A and B are zeros of
6776 opposite sign, the first two transformations will change
6777 the sign of the result. In the last four, the original
6778 expressions give different results for (A=+0, B=-0) and
6779 (A=-0, B=+0), but the transformed expressions do not.
6780
6781 The first two transformations are correct if either A or B
6782 is a NaN. In the first transformation, the condition will
6783 be false, and B will indeed be chosen. In the case of the
6784 second transformation, the condition A != B will be true,
6785 and A will be chosen.
6786
6787 The conversions to max() and min() are not correct if B is
6788 a number and A is not. The conditions in the original
6789 expressions will be false, so all four give B. The min()
6790 and max() versions would give a NaN instead. */
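/* For instance, X < Y ? X : Y folds to MIN_EXPR <Y, X> below (note
   the operand order chosen for the C++ lvalue case), but only when
   NaNs cannot occur in the operands' mode.  */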
6791 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
6792 arg2, TREE_OPERAND (arg0, 0)))
6793 {
6794 tree comp_op0 = TREE_OPERAND (arg0, 0);
6795 tree comp_op1 = TREE_OPERAND (arg0, 1);
6796 tree comp_type = TREE_TYPE (comp_op0);
6797
6798 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
6799 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
6800 comp_type = type;
6801
6802 switch (comp_code)
6803 {
6804 case EQ_EXPR:
6805 return pedantic_non_lvalue (convert (type, arg2));
6806 case NE_EXPR:
6807 return pedantic_non_lvalue (convert (type, arg1));
6808 case LE_EXPR:
6809 case LT_EXPR:
6810 /* In C++ a ?: expression can be an lvalue, so put first
6811 the operand which will be used if the two are equal,
6812 so that we can convert this back to the
6813 corresponding COND_EXPR. */
6814 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
6815 return pedantic_non_lvalue
6816 (convert (type, fold (build (MIN_EXPR, comp_type,
6817 (comp_code == LE_EXPR
6818 ? comp_op0 : comp_op1),
6819 (comp_code == LE_EXPR
6820 ? comp_op1 : comp_op0)))));
6821 break;
6822 case GE_EXPR:
6823 case GT_EXPR:
6824 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
6825 return pedantic_non_lvalue
6826 (convert (type, fold (build (MAX_EXPR, comp_type,
6827 (comp_code == GE_EXPR
6828 ? comp_op0 : comp_op1),
6829 (comp_code == GE_EXPR
6830 ? comp_op1 : comp_op0)))));
6831 break;
6832 default:
6833 abort ();
6834 }
6835 }
6836
6837 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
6838 we might still be able to simplify this. For example,
6839 if C1 is one less or one more than C2, this might have started
6840 out as a MIN or MAX and been transformed by this function.
6841 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
6842
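/* For example, X < 4 ? X : 3 has C1 == C2 + 1, so the LT_EXPR case
   below rebuilds it as MIN_EXPR <X, 3>.  */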
6843 if (INTEGRAL_TYPE_P (type)
6844 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6845 && TREE_CODE (arg2) == INTEGER_CST)
6846 switch (comp_code)
6847 {
6848 case EQ_EXPR:
6849 /* We can replace A with C1 in this case. */
6850 arg1 = convert (type, TREE_OPERAND (arg0, 1));
6851 t = build (code, type, TREE_OPERAND (t, 0), arg1,
6852 TREE_OPERAND (t, 2));
6853 break;
6854
6855 case LT_EXPR:
6856 /* If C1 is C2 + 1, this is min(A, C2). */
6857 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
6858 && operand_equal_p (TREE_OPERAND (arg0, 1),
6859 const_binop (PLUS_EXPR, arg2,
6860 integer_one_node, 0), 1))
6861 return pedantic_non_lvalue
6862 (fold (build (MIN_EXPR, type, arg1, arg2)));
6863 break;
6864
6865 case LE_EXPR:
6866 /* If C1 is C2 - 1, this is min(A, C2). */
6867 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
6868 && operand_equal_p (TREE_OPERAND (arg0, 1),
6869 const_binop (MINUS_EXPR, arg2,
6870 integer_one_node, 0), 1))
6871 return pedantic_non_lvalue
6872 (fold (build (MIN_EXPR, type, arg1, arg2)));
6873 break;
6874
6875 case GT_EXPR:
6876 /* If C1 is C2 - 1, this is max(A, C2). */
6877 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
6878 && operand_equal_p (TREE_OPERAND (arg0, 1),
6879 const_binop (MINUS_EXPR, arg2,
6880 integer_one_node, 0), 1))
6881 return pedantic_non_lvalue
6882 (fold (build (MAX_EXPR, type, arg1, arg2)));
6883 break;
6884
6885 case GE_EXPR:
6886 /* If C1 is C2 + 1, this is max(A, C2). */
6887 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
6888 && operand_equal_p (TREE_OPERAND (arg0, 1),
6889 const_binop (PLUS_EXPR, arg2,
6890 integer_one_node, 0), 1))
6891 return pedantic_non_lvalue
6892 (fold (build (MAX_EXPR, type, arg1, arg2)));
6893 break;
6894 case NE_EXPR:
6895 break;
6896 default:
6897 abort ();
6898 }
6899 }
6900
6901 /* If the second operand is simpler than the third, swap them
6902 since that produces better jump optimization results. */
6903 if ((TREE_CONSTANT (arg1) || DECL_P (arg1)
6904 || TREE_CODE (arg1) == SAVE_EXPR)
6905 && ! (TREE_CONSTANT (TREE_OPERAND (t, 2))
6906 || DECL_P (TREE_OPERAND (t, 2))
6907 || TREE_CODE (TREE_OPERAND (t, 2)) == SAVE_EXPR))
6908 {
6909 /* See if this can be inverted. If it can't, possibly because
6910 it was a floating-point inequality comparison, don't do
6911 anything. */
6912 tem = invert_truthvalue (arg0);
6913
6914 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
6915 {
6916 t = build (code, type, tem,
6917 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
6918 arg0 = tem;
6919 /* arg1 should be the first argument of the new T. */
6920 arg1 = TREE_OPERAND (t, 1);
6921 STRIP_NOPS (arg1);
6922 }
6923 }
6924
6925 /* Convert A ? 1 : 0 to simply A. */
6926 if (integer_onep (TREE_OPERAND (t, 1))
6927 && integer_zerop (TREE_OPERAND (t, 2))
6928 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
6929 call to fold will try to move the conversion inside
6930 a COND, which will recurse. In that case, the COND_EXPR
6931 is probably the best choice, so leave it alone. */
6932 && type == TREE_TYPE (arg0))
6933 return pedantic_non_lvalue (arg0);
6934
6935 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
6936 operation is simply A & 2. */
6937
6938 if (integer_zerop (TREE_OPERAND (t, 2))
6939 && TREE_CODE (arg0) == NE_EXPR
6940 && integer_zerop (TREE_OPERAND (arg0, 1))
6941 && integer_pow2p (arg1)
6942 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
6943 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
6944 arg1, 1))
6945 return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));
6946
6947 return t;
6948
6949 case COMPOUND_EXPR:
6950 /* When pedantic, a compound expression can be neither an lvalue
6951 nor an integer constant expression. */
6952 if (TREE_SIDE_EFFECTS (arg0) || pedantic)
6953 return t;
6954 /* Don't let (0, 0) be a null pointer constant. */
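/* The NOP_EXPR around the zero keeps the result from being an
   INTEGER_CST, and hence from being treated as a null pointer
   constant by the front ends.  */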
6955 if (integer_zerop (arg1))
6956 return build1 (NOP_EXPR, type, arg1);
6957 return convert (type, arg1);
6958
6959 case COMPLEX_EXPR:
6960 if (wins)
6961 return build_complex (type, arg0, arg1);
6962 return t;
6963
6964 case REALPART_EXPR:
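/* The real part distributes over addition and subtraction:
   REALPART (X + Y) is REALPART (X) + REALPART (Y).  The
   IMAGPART_EXPR case below uses the same identity for the
   imaginary part.  */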
6965 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6966 return t;
6967 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6968 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
6969 TREE_OPERAND (arg0, 1));
6970 else if (TREE_CODE (arg0) == COMPLEX_CST)
6971 return TREE_REALPART (arg0);
6972 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6973 return fold (build (TREE_CODE (arg0), type,
6974 fold (build1 (REALPART_EXPR, type,
6975 TREE_OPERAND (arg0, 0))),
6976 fold (build1 (REALPART_EXPR,
6977 type, TREE_OPERAND (arg0, 1)))));
6978 return t;
6979
6980 case IMAGPART_EXPR:
6981 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6982 return convert (type, integer_zero_node);
6983 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6984 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
6985 TREE_OPERAND (arg0, 0));
6986 else if (TREE_CODE (arg0) == COMPLEX_CST)
6987 return TREE_IMAGPART (arg0);
6988 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6989 return fold (build (TREE_CODE (arg0), type,
6990 fold (build1 (IMAGPART_EXPR, type,
6991 TREE_OPERAND (arg0, 0))),
6992 fold (build1 (IMAGPART_EXPR, type,
6993 TREE_OPERAND (arg0, 1)))));
6994 return t;
6995
6996 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
6997 appropriate. */
6998 case CLEANUP_POINT_EXPR:
6999 if (! has_cleanups (arg0))
7000 return TREE_OPERAND (t, 0);
7001
7002 {
7003 enum tree_code code0 = TREE_CODE (arg0);
7004 int kind0 = TREE_CODE_CLASS (code0);
7005 tree arg00 = TREE_OPERAND (arg0, 0);
7006 tree arg01;
7007
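/* A unary operation can always be pulled out of the cleanup point:
   CLEANUP_POINT (-X) becomes -CLEANUP_POINT (X).  For binary and
   short-circuit operations the cleanup point may be pushed onto
   one operand only when the other is constant (or, for ANDIF and
   ORIF, cleanup-free), so that no cleanup runs at a different
   point than before.  */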
7008 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
7009 return fold (build1 (code0, type,
7010 fold (build1 (CLEANUP_POINT_EXPR,
7011 TREE_TYPE (arg00), arg00))));
7012
7013 if (kind0 == '<' || kind0 == '2'
7014 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
7015 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
7016 || code0 == TRUTH_XOR_EXPR)
7017 {
7018 arg01 = TREE_OPERAND (arg0, 1);
7019
7020 if (TREE_CONSTANT (arg00)
7021 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
7022 && ! has_cleanups (arg00)))
7023 return fold (build (code0, type, arg00,
7024 fold (build1 (CLEANUP_POINT_EXPR,
7025 TREE_TYPE (arg01), arg01))));
7026
7027 if (TREE_CONSTANT (arg01))
7028 return fold (build (code0, type,
7029 fold (build1 (CLEANUP_POINT_EXPR,
7030 TREE_TYPE (arg00), arg00)),
7031 arg01));
7032 }
7033
7034 return t;
7035 }
7036
7037 case CALL_EXPR:
7038 /* Check for a built-in function. */
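/* fold_builtin returns the simplified tree for calls it knows how
   to fold at compile time, or NULL_TREE to leave the CALL_EXPR
   untouched.  */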
7039 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
7040 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (expr, 0), 0))
7041 == FUNCTION_DECL)
7042 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
7043 {
7044 tree tmp = fold_builtin (expr);
7045 if (tmp)
7046 return tmp;
7047 }
7048 return t;
7049
7050 default:
7051 return t;
7052 } /* switch (code) */
7053 }
7054
7055 /* Determine if the first argument is a multiple of the second argument.
7056 Return 0 if it is not, or if we cannot easily determine it to be.
7057
7058 An example of the sort of thing we care about (at this point; this routine
7059 could surely be made more general, and expanded to do what the *_DIV_EXPR's
7060 fold cases do now) is discovering that
7061
7062 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
7063
7064 is a multiple of
7065
7066 SAVE_EXPR (J * 8)
7067
7068 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
7069
7070 This code also handles discovering that
7071
7072 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
7073
7074 is a multiple of 8 so we don't have to worry about dealing with a
7075 possible remainder.
7076
7077 Note that we *look* inside a SAVE_EXPR only to determine how it was
7078 calculated; it is not safe for fold to do much of anything else with the
7079 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
7080 at run time. For example, the latter example above *cannot* be implemented
7081 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
7082 evaluation time of the original SAVE_EXPR is not necessarily the same at
7083 the time the new expression is evaluated. The only optimization of this
7084 sort that would be valid is changing
7085
7086 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
7087
7088 divided by 8 to
7089
7090 SAVE_EXPR (I) * SAVE_EXPR (J)
7091
7092 (where the same SAVE_EXPR (J) is used in the original and the
7093 transformed version). */
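/* As a further concrete case: (J * 8 + 16) is a multiple of 8.  The
   PLUS_EXPR case below requires both operands to be multiples, the
   MULT_EXPR case requires only one of them, and the INTEGER_CST
   case reduces to checking via const_binop that 16 mod 8 is zero.  */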
7094
7095 static int
7096 multiple_of_p (type, top, bottom)
7097 tree type;
7098 tree top;
7099 tree bottom;
7100 {
7101 if (operand_equal_p (top, bottom, 0))
7102 return 1;
7103
7104 if (TREE_CODE (type) != INTEGER_TYPE)
7105 return 0;
7106
7107 switch (TREE_CODE (top))
7108 {
7109 case MULT_EXPR:
7110 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
7111 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
7112
7113 case PLUS_EXPR:
7114 case MINUS_EXPR:
7115 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
7116 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
7117
7118 case LSHIFT_EXPR:
7119 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
7120 {
7121 tree op1, t1;
7122
7123 op1 = TREE_OPERAND (top, 1);
7124 /* const_binop may not detect overflow correctly,
7125 so check for it explicitly here. */
7126 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
7127 > TREE_INT_CST_LOW (op1)
7128 && TREE_INT_CST_HIGH (op1) == 0
7129 && 0 != (t1 = convert (type,
7130 const_binop (LSHIFT_EXPR, size_one_node,
7131 op1, 0)))
7132 && ! TREE_OVERFLOW (t1))
7133 return multiple_of_p (type, t1, bottom);
7134 }
7135 return 0;
7136
7137 case NOP_EXPR:
7138 /* Can't handle conversions from non-integral or wider integral types. */
7139 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
7140 || (TYPE_PRECISION (type)
7141 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
7142 return 0;
7143
7144 /* ... fall through ... */
7145
7146 case SAVE_EXPR:
7147 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
7148
7149 case INTEGER_CST:
7150 if (TREE_CODE (bottom) != INTEGER_CST
7151 || (TREE_UNSIGNED (type)
7152 && (tree_int_cst_sgn (top) < 0
7153 || tree_int_cst_sgn (bottom) < 0)))
7154 return 0;
7155 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
7156 top, bottom, 0));
7157
7158 default:
7159 return 0;
7160 }
7161 }
7162
7163 /* Return true if `t' is known to be non-negative. */
7164
7165 int
7166 tree_expr_nonnegative_p (t)
7167 tree t;
7168 {
7169 switch (TREE_CODE (t))
7170 {
7171 case ABS_EXPR:
7172 case FFS_EXPR:
7173 return 1;
7174 case INTEGER_CST:
7175 return tree_int_cst_sgn (t) >= 0;
7176 case TRUNC_DIV_EXPR:
7177 case CEIL_DIV_EXPR:
7178 case FLOOR_DIV_EXPR:
7179 case ROUND_DIV_EXPR:
7180 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
7181 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7182 case TRUNC_MOD_EXPR:
7183 case CEIL_MOD_EXPR:
7184 case FLOOR_MOD_EXPR:
7185 case ROUND_MOD_EXPR:
7186 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
7187 case COND_EXPR:
7188 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
7189 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
7190 case COMPOUND_EXPR:
7191 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
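/* Note the asymmetry in the next two cases: MIN_EXPR is nonnegative
   only if both operands are, while MAX_EXPR is nonnegative as soon
   as either operand is.  */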
7192 case MIN_EXPR:
7193 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
7194 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7195 case MAX_EXPR:
7196 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
7197 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7198 case MODIFY_EXPR:
7199 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7200 case BIND_EXPR:
7201 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7202 case SAVE_EXPR:
7203 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
7204 case NON_LVALUE_EXPR:
7205 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
7206 case RTL_EXPR:
7207 return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));
7208
7209 default:
7210 if (truth_value_p (TREE_CODE (t)))
7211 /* Truth values evaluate to 0 or 1, both of which are nonnegative. */
7212 return 1;
7213 else
7214 /* We don't know the sign of `t', so be conservative and return false. */
7215 return 0;
7216 }
7217 }
7218
7219 /* Return true if `r' is known to be non-negative.
7220 Only handles constants at the moment. */
7221
7222 int
7223 rtl_expr_nonnegative_p (r)
7224 rtx r;
7225 {
7226 switch (GET_CODE (r))
7227 {
7228 case CONST_INT:
7229 return INTVAL (r) >= 0;
7230
7231 case CONST_DOUBLE:
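/* A CONST_DOUBLE with VOIDmode is a double-width integer constant;
   its sign is the sign of the high word.  Anything else (such as a
   floating-point constant) is conservatively treated as possibly
   negative.  */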
7232 if (GET_MODE (r) == VOIDmode)
7233 return CONST_DOUBLE_HIGH (r) >= 0;
7234 return 0;
7235
7236 case CONST_VECTOR:
7237 {
7238 int units, i;
7239 rtx elt;
7240
7241 units = CONST_VECTOR_NUNITS (r);
7242
7243 for (i = 0; i < units; ++i)
7244 {
7245 elt = CONST_VECTOR_ELT (r, i);
7246 if (!rtl_expr_nonnegative_p (elt))
7247 return 0;
7248 }
7249
7250 return 1;
7251 }
7252
7253 case SYMBOL_REF:
7254 case LABEL_REF:
7255 /* These are always nonnegative. */
7256 return 1;
7257
7258 default:
7259 return 0;
7260 }
7261 }
7262
7263 #include "gt-fold-const.h"