1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@   warn if precision etc. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
29
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type.
32
33 fold takes a tree as argument and returns a simplified tree.
34
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
38
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
41
42 force_fit_type takes a constant and prior overflow indicator, and
43 forces the value to fit the type. It returns an overflow indicator. */
44
45 #include "config.h"
46 #include "system.h"
47 #include "flags.h"
48 #include "tree.h"
49 #include "rtl.h"
50 #include "expr.h"
51 #include "tm_p.h"
52 #include "toplev.h"
53 #include "ggc.h"
54 #include "hashtab.h"
55 #include "langhooks.h"
56
57 static void encode PARAMS ((HOST_WIDE_INT *,
58 unsigned HOST_WIDE_INT,
59 HOST_WIDE_INT));
60 static void decode PARAMS ((HOST_WIDE_INT *,
61 unsigned HOST_WIDE_INT *,
62 HOST_WIDE_INT *));
63 static tree negate_expr PARAMS ((tree));
64 static tree split_tree PARAMS ((tree, enum tree_code, tree *, tree *,
65 tree *, int));
66 static tree associate_trees PARAMS ((tree, tree, enum tree_code, tree));
67 static tree int_const_binop PARAMS ((enum tree_code, tree, tree, int));
68 static tree const_binop PARAMS ((enum tree_code, tree, tree, int));
69 static hashval_t size_htab_hash PARAMS ((const void *));
70 static int size_htab_eq PARAMS ((const void *, const void *));
71 static tree fold_convert PARAMS ((tree, tree));
72 static enum tree_code invert_tree_comparison PARAMS ((enum tree_code));
73 static enum tree_code swap_tree_comparison PARAMS ((enum tree_code));
74 static int truth_value_p PARAMS ((enum tree_code));
75 static int operand_equal_for_comparison_p PARAMS ((tree, tree, tree));
76 static int twoval_comparison_p PARAMS ((tree, tree *, tree *, int *));
77 static tree eval_subst PARAMS ((tree, tree, tree, tree, tree));
78 static tree omit_one_operand PARAMS ((tree, tree, tree));
79 static tree pedantic_omit_one_operand PARAMS ((tree, tree, tree));
80 static tree distribute_bit_expr PARAMS ((enum tree_code, tree, tree, tree));
81 static tree make_bit_field_ref PARAMS ((tree, tree, int, int, int));
82 static tree optimize_bit_field_compare PARAMS ((enum tree_code, tree,
83 tree, tree));
84 static tree decode_field_reference PARAMS ((tree, HOST_WIDE_INT *,
85 HOST_WIDE_INT *,
86 enum machine_mode *, int *,
87 int *, tree *, tree *));
88 static int all_ones_mask_p PARAMS ((tree, int));
89 static tree sign_bit_p PARAMS ((tree, tree));
90 static int simple_operand_p PARAMS ((tree));
91 static tree range_binop PARAMS ((enum tree_code, tree, tree, int,
92 tree, int));
93 static tree make_range PARAMS ((tree, int *, tree *, tree *));
94 static tree build_range_check PARAMS ((tree, tree, int, tree, tree));
95 static int merge_ranges PARAMS ((int *, tree *, tree *, int, tree, tree,
96 int, tree, tree));
97 static tree fold_range_test PARAMS ((tree));
98 static tree unextend PARAMS ((tree, int, int, tree));
99 static tree fold_truthop PARAMS ((enum tree_code, tree, tree, tree));
100 static tree optimize_minmax_comparison PARAMS ((tree));
101 static tree extract_muldiv PARAMS ((tree, tree, enum tree_code, tree));
102 static tree strip_compound_expr PARAMS ((tree, tree));
103 static int multiple_of_p PARAMS ((tree, tree, tree));
104 static tree constant_boolean_node PARAMS ((int, tree));
105 static int count_cond PARAMS ((tree, int));
106 static tree fold_binary_op_with_conditional_arg
107 PARAMS ((enum tree_code, tree, tree, tree, int));
108 static bool fold_real_zero_addition_p PARAMS ((tree, tree, int));
109
110 #if defined(HOST_EBCDIC)
111 /* bit 8 is significant in EBCDIC */
112 #define CHARMASK 0xff
113 #else
114 #define CHARMASK 0x7f
115 #endif
116
117 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
118 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
119 and SUM1. Then this yields nonzero if overflow occurred during the
120 addition.
121
122 Overflow occurs if A and B have the same sign, but A and SUM differ in
123 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
124 sign. */
125 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
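/* Illustrative sketch (not part of the original file): with 32-bit
   ints, INT_MAX + 1 wraps to INT_MIN, so the operands agree in sign
   while the sum does not, and the macro yields nonzero.  */
#if 0 /* example only */
#include <limits.h>
static int
overflow_sum_sign_example (void)
{
  int a = INT_MAX, b = 1;
  int sum = a + b;			/* wraps to INT_MIN on 2's complement */
  return OVERFLOW_SUM_SIGN (a, b, sum);	/* nonzero: a, b >= 0 but sum < 0 */
}
#endif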
126 \f
127 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
128 We do that by representing the two-word integer in 4 words, with only
129 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
130 number. The value of the word is LOWPART + HIGHPART * BASE. */
131
132 #define LOWPART(x) \
133 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
134 #define HIGHPART(x) \
135 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
136 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
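/* Worked example (illustrative): assuming HOST_BITS_PER_WIDE_INT == 32,
   LOWPART keeps the low 16 bits, HIGHPART the high 16 bits, and BASE
   is 2**16.  For x = 0x12345678:
     LOWPART (x)  == 0x5678
     HIGHPART (x) == 0x1234
     LOWPART (x) + HIGHPART (x) * BASE == x.  */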
137
138 /* Unpack a two-word integer into 4 words.
139 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
140 WORDS points to the array of HOST_WIDE_INTs. */
141
142 static void
143 encode (words, low, hi)
144 HOST_WIDE_INT *words;
145 unsigned HOST_WIDE_INT low;
146 HOST_WIDE_INT hi;
147 {
148 words[0] = LOWPART (low);
149 words[1] = HIGHPART (low);
150 words[2] = LOWPART (hi);
151 words[3] = HIGHPART (hi);
152 }
153
154 /* Pack an array of 4 words into a two-word integer.
155 WORDS points to the array of words.
156 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
157
158 static void
159 decode (words, low, hi)
160 HOST_WIDE_INT *words;
161 unsigned HOST_WIDE_INT *low;
162 HOST_WIDE_INT *hi;
163 {
164 *low = words[0] + words[1] * BASE;
165 *hi = words[2] + words[3] * BASE;
166 }
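/* Usage sketch (illustrative, not part of the original file): encode
   and decode are inverses, so a doubleword survives a round trip.  */
#if 0 /* example only */
static void
encode_decode_roundtrip (void)
{
  HOST_WIDE_INT words[4];
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;

  encode (words, 0x1234, 0x5678);
  decode (words, &low, &hi);
  /* Here low == 0x1234 and hi == 0x5678 again.  */
}
#endif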
167 \f
168 /* Make the integer constant T valid for its type by setting to 0 or 1 all
169 the bits in the constant that don't belong in the type.
170
171 Return 1 if a signed overflow occurs, 0 otherwise. If OVERFLOW is
172 nonzero, a signed overflow has already occurred in calculating T, so
173 propagate it.
174
175 Make the real constant T valid for its type by calling CHECK_FLOAT_VALUE,
176 if it exists. */
177
178 int
179 force_fit_type (t, overflow)
180 tree t;
181 int overflow;
182 {
183 unsigned HOST_WIDE_INT low;
184 HOST_WIDE_INT high;
185 unsigned int prec;
186
187 if (TREE_CODE (t) == REAL_CST)
188 {
189 #ifdef CHECK_FLOAT_VALUE
190 CHECK_FLOAT_VALUE (TYPE_MODE (TREE_TYPE (t)), TREE_REAL_CST (t),
191 overflow);
192 #endif
193 return overflow;
194 }
195
196 else if (TREE_CODE (t) != INTEGER_CST)
197 return overflow;
198
199 low = TREE_INT_CST_LOW (t);
200 high = TREE_INT_CST_HIGH (t);
201
202 if (POINTER_TYPE_P (TREE_TYPE (t)))
203 prec = POINTER_SIZE;
204 else
205 prec = TYPE_PRECISION (TREE_TYPE (t));
206
207 /* First clear all bits that are beyond the type's precision. */
208
209 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
210 ;
211 else if (prec > HOST_BITS_PER_WIDE_INT)
212 TREE_INT_CST_HIGH (t)
213 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
214 else
215 {
216 TREE_INT_CST_HIGH (t) = 0;
217 if (prec < HOST_BITS_PER_WIDE_INT)
218 TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
219 }
220
221 /* Unsigned types do not suffer sign extension or overflow unless they
222 are a sizetype. */
223 if (TREE_UNSIGNED (TREE_TYPE (t))
224 && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
225 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
226 return overflow;
227
228 /* If the value's sign bit is set, extend the sign. */
229 if (prec != 2 * HOST_BITS_PER_WIDE_INT
230 && (prec > HOST_BITS_PER_WIDE_INT
231 ? 0 != (TREE_INT_CST_HIGH (t)
232 & ((HOST_WIDE_INT) 1
233 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
234 : 0 != (TREE_INT_CST_LOW (t)
235 & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
236 {
237 /* Value is negative:
238 set to 1 all the bits that are outside this type's precision. */
239 if (prec > HOST_BITS_PER_WIDE_INT)
240 TREE_INT_CST_HIGH (t)
241 |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
242 else
243 {
244 TREE_INT_CST_HIGH (t) = -1;
245 if (prec < HOST_BITS_PER_WIDE_INT)
246 TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
247 }
248 }
249
250 /* Return nonzero if signed overflow occurred. */
251 return
252 ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
253 != 0);
254 }
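/* Illustrative sketch (not part of the original file): the
   truncate-then-sign-extend step above, specialized to a value that
   fits in one word and a precision smaller than
   HOST_BITS_PER_WIDE_INT.  The helper name is hypothetical.  */
#if 0 /* example only */
static HOST_WIDE_INT
fit_one_word (unsigned HOST_WIDE_INT low, unsigned int prec)
{
  /* Clear all bits beyond the precision.  */
  low &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
  /* If the (new) sign bit is set, set all the bits above it.  */
  if (low & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))
    low |= ((unsigned HOST_WIDE_INT) (-1) << prec);
  return (HOST_WIDE_INT) low;
}
/* fit_one_word (0xff, 8) == -1, fit_one_word (0x7f, 8) == 127.  */
#endif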
255 \f
256 /* Add two doubleword integers with doubleword result.
257 Each argument is given as two `HOST_WIDE_INT' pieces.
258 One argument is L1 and H1; the other, L2 and H2.
259 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
260
261 int
262 add_double (l1, h1, l2, h2, lv, hv)
263 unsigned HOST_WIDE_INT l1, l2;
264 HOST_WIDE_INT h1, h2;
265 unsigned HOST_WIDE_INT *lv;
266 HOST_WIDE_INT *hv;
267 {
268 unsigned HOST_WIDE_INT l;
269 HOST_WIDE_INT h;
270
271 l = l1 + l2;
272 h = h1 + h2 + (l < l1);
273
274 *lv = l;
275 *hv = h;
276 return OVERFLOW_SUM_SIGN (h1, h2, h);
277 }
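/* Usage sketch (illustrative): the carry out of the low word is
   detected by the unsigned comparison L < L1, since unsigned addition
   wraps exactly when it overflows.  */
#if 0 /* example only */
static void
add_double_example (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;

  /* (~0, 0) + (1, 0): the low word wraps to 0 and carries into the
     high word, giving (0, 1); the return value is 0, since no signed
     overflow occurred in the high word.  */
  add_double ((unsigned HOST_WIDE_INT) -1, 0, 1, 0, &lv, &hv);
}
#endif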
278
279 /* Negate a doubleword integer with doubleword result.
280 Return nonzero if the operation overflows, assuming it's signed.
281 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
282 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
283
284 int
285 neg_double (l1, h1, lv, hv)
286 unsigned HOST_WIDE_INT l1;
287 HOST_WIDE_INT h1;
288 unsigned HOST_WIDE_INT *lv;
289 HOST_WIDE_INT *hv;
290 {
291 if (l1 == 0)
292 {
293 *lv = 0;
294 *hv = - h1;
295 return (*hv & h1) < 0;
296 }
297 else
298 {
299 *lv = -l1;
300 *hv = ~h1;
301 return 0;
302 }
303 }
304 \f
305 /* Multiply two doubleword integers with doubleword result.
306 Return nonzero if the operation overflows, assuming it's signed.
307 Each argument is given as two `HOST_WIDE_INT' pieces.
308 One argument is L1 and H1; the other, L2 and H2.
309 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
310
311 int
312 mul_double (l1, h1, l2, h2, lv, hv)
313 unsigned HOST_WIDE_INT l1, l2;
314 HOST_WIDE_INT h1, h2;
315 unsigned HOST_WIDE_INT *lv;
316 HOST_WIDE_INT *hv;
317 {
318 HOST_WIDE_INT arg1[4];
319 HOST_WIDE_INT arg2[4];
320 HOST_WIDE_INT prod[4 * 2];
321 unsigned HOST_WIDE_INT carry;
322 int i, j, k;
323 unsigned HOST_WIDE_INT toplow, neglow;
324 HOST_WIDE_INT tophigh, neghigh;
325
326 encode (arg1, l1, h1);
327 encode (arg2, l2, h2);
328
329 memset ((char *) prod, 0, sizeof prod);
330
331 for (i = 0; i < 4; i++)
332 {
333 carry = 0;
334 for (j = 0; j < 4; j++)
335 {
336 k = i + j;
337 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
338 carry += arg1[i] * arg2[j];
339 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
340 carry += prod[k];
341 prod[k] = LOWPART (carry);
342 carry = HIGHPART (carry);
343 }
344 prod[i + 4] = carry;
345 }
346
347 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
348
349 /* Check for overflow by calculating the top half of the answer in full;
350 it should agree with the low half's sign bit. */
351 decode (prod + 4, &toplow, &tophigh);
352 if (h1 < 0)
353 {
354 neg_double (l2, h2, &neglow, &neghigh);
355 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
356 }
357 if (h2 < 0)
358 {
359 neg_double (l1, h1, &neglow, &neghigh);
360 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
361 }
362 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
363 }
364 \f
365 /* Shift the doubleword integer in L1, H1 left by COUNT places
366 keeping only PREC bits of result.
367 Shift right if COUNT is negative.
368 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
369 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
370
371 void
372 lshift_double (l1, h1, count, prec, lv, hv, arith)
373 unsigned HOST_WIDE_INT l1;
374 HOST_WIDE_INT h1, count;
375 unsigned int prec;
376 unsigned HOST_WIDE_INT *lv;
377 HOST_WIDE_INT *hv;
378 int arith;
379 {
380 unsigned HOST_WIDE_INT signmask;
381
382 if (count < 0)
383 {
384 rshift_double (l1, h1, -count, prec, lv, hv, arith);
385 return;
386 }
387
388 #ifdef SHIFT_COUNT_TRUNCATED
389 if (SHIFT_COUNT_TRUNCATED)
390 count %= prec;
391 #endif
392
393 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
394 {
395 /* Shifting by the host word size is undefined according to the
396 ANSI standard, so we must handle this as a special case. */
397 *hv = 0;
398 *lv = 0;
399 }
400 else if (count >= HOST_BITS_PER_WIDE_INT)
401 {
402 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
403 *lv = 0;
404 }
405 else
406 {
407 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
408 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
409 *lv = l1 << count;
410 }
411
412 /* Sign extend all bits that are beyond the precision. */
413
414 signmask = -((prec > HOST_BITS_PER_WIDE_INT
415 ? ((unsigned HOST_WIDE_INT) *hv
416 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
417 : (*lv >> (prec - 1))) & 1);
418
419 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
420 ;
421 else if (prec >= HOST_BITS_PER_WIDE_INT)
422 {
423 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
424 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
425 }
426 else
427 {
428 *hv = signmask;
429 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
430 *lv |= signmask << prec;
431 }
432 }
433
434 /* Shift the doubleword integer in L1, H1 right by COUNT places
435 keeping only PREC bits of result. COUNT must be positive.
436 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
437 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
438
439 void
440 rshift_double (l1, h1, count, prec, lv, hv, arith)
441 unsigned HOST_WIDE_INT l1;
442 HOST_WIDE_INT h1, count;
443 unsigned int prec;
444 unsigned HOST_WIDE_INT *lv;
445 HOST_WIDE_INT *hv;
446 int arith;
447 {
448 unsigned HOST_WIDE_INT signmask;
449
450 signmask = (arith
451 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
452 : 0);
453
454 #ifdef SHIFT_COUNT_TRUNCATED
455 if (SHIFT_COUNT_TRUNCATED)
456 count %= prec;
457 #endif
458
459 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
460 {
461 /* Shifting by the host word size is undefined according to the
462 ANSI standard, so we must handle this as a special case. */
463 *hv = 0;
464 *lv = 0;
465 }
466 else if (count >= HOST_BITS_PER_WIDE_INT)
467 {
468 *hv = 0;
469 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
470 }
471 else
472 {
473 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
474 *lv = ((l1 >> count)
475 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
476 }
477
478 /* Zero / sign extend all bits that are beyond the precision. */
479
480 if (count >= (HOST_WIDE_INT)prec)
481 {
482 *hv = signmask;
483 *lv = signmask;
484 }
485 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
486 ;
487 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
488 {
489 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
490 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
491 }
492 else
493 {
494 *hv = signmask;
495 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
496 *lv |= signmask << (prec - count);
497 }
498 }
499 \f
500 /* Rotate the doubleword integer in L1, H1 left by COUNT places
501 keeping only PREC bits of result.
502 Rotate right if COUNT is negative.
503 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
504
505 void
506 lrotate_double (l1, h1, count, prec, lv, hv)
507 unsigned HOST_WIDE_INT l1;
508 HOST_WIDE_INT h1, count;
509 unsigned int prec;
510 unsigned HOST_WIDE_INT *lv;
511 HOST_WIDE_INT *hv;
512 {
513 unsigned HOST_WIDE_INT s1l, s2l;
514 HOST_WIDE_INT s1h, s2h;
515
516 count %= prec;
517 if (count < 0)
518 count += prec;
519
520 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
521 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
522 *lv = s1l | s2l;
523 *hv = s1h | s2h;
524 }
525
526 /* Rotate the doubleword integer in L1, H1 right by COUNT places
527    keeping only PREC bits of result.  Rotate left if COUNT is negative.
528 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
529
530 void
531 rrotate_double (l1, h1, count, prec, lv, hv)
532 unsigned HOST_WIDE_INT l1;
533 HOST_WIDE_INT h1, count;
534 unsigned int prec;
535 unsigned HOST_WIDE_INT *lv;
536 HOST_WIDE_INT *hv;
537 {
538 unsigned HOST_WIDE_INT s1l, s2l;
539 HOST_WIDE_INT s1h, s2h;
540
541 count %= prec;
542 if (count < 0)
543 count += prec;
544
545 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
546 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
547 *lv = s1l | s2l;
548 *hv = s1h | s2h;
549 }
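/* Illustrative note (not part of the original file): both rotates are
   assembled from two logical shifts via the identity
   rotl (x, n) == (x << n) | (x >> (prec - n)) within PREC bits.  */
#if 0 /* example only */
static void
rotate_example (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;

  lrotate_double (1, 0, 1, 8, &lv, &hv);	/* lv == 2 */
  lrotate_double (1, 0, 8, 8, &lv, &hv);	/* lv == 1: full rotation */
}
#endif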
550 \f
551 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
552 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
553 CODE is a tree code for a kind of division, one of
554 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
555 or EXACT_DIV_EXPR
556 It controls how the quotient is rounded to an integer.
557 Return nonzero if the operation overflows.
558 UNS nonzero says do unsigned division. */
559
560 int
561 div_and_round_double (code, uns,
562 lnum_orig, hnum_orig, lden_orig, hden_orig,
563 lquo, hquo, lrem, hrem)
564 enum tree_code code;
565 int uns;
566 unsigned HOST_WIDE_INT lnum_orig; /* num == numerator == dividend */
567 HOST_WIDE_INT hnum_orig;
568 unsigned HOST_WIDE_INT lden_orig; /* den == denominator == divisor */
569 HOST_WIDE_INT hden_orig;
570 unsigned HOST_WIDE_INT *lquo, *lrem;
571 HOST_WIDE_INT *hquo, *hrem;
572 {
573 int quo_neg = 0;
574 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
575 HOST_WIDE_INT den[4], quo[4];
576 int i, j;
577 unsigned HOST_WIDE_INT work;
578 unsigned HOST_WIDE_INT carry = 0;
579 unsigned HOST_WIDE_INT lnum = lnum_orig;
580 HOST_WIDE_INT hnum = hnum_orig;
581 unsigned HOST_WIDE_INT lden = lden_orig;
582 HOST_WIDE_INT hden = hden_orig;
583 int overflow = 0;
584
585 if (hden == 0 && lden == 0)
586 overflow = 1, lden = 1;
587
588 /* calculate quotient sign and convert operands to unsigned. */
589 if (!uns)
590 {
591 if (hnum < 0)
592 {
593 quo_neg = ~ quo_neg;
594 /* (minimum integer) / (-1) is the only overflow case. */
595 if (neg_double (lnum, hnum, &lnum, &hnum)
596 && ((HOST_WIDE_INT) lden & hden) == -1)
597 overflow = 1;
598 }
599 if (hden < 0)
600 {
601 quo_neg = ~ quo_neg;
602 neg_double (lden, hden, &lden, &hden);
603 }
604 }
605
606 if (hnum == 0 && hden == 0)
607 { /* single precision */
608 *hquo = *hrem = 0;
609 /* This unsigned division rounds toward zero. */
610 *lquo = lnum / lden;
611 goto finish_up;
612 }
613
614 if (hnum == 0)
615 { /* trivial case: dividend < divisor */
616 /* hden != 0 already checked. */
617 *hquo = *lquo = 0;
618 *hrem = hnum;
619 *lrem = lnum;
620 goto finish_up;
621 }
622
623 memset ((char *) quo, 0, sizeof quo);
624
625   memset ((char *) num, 0, sizeof num);	/* to zero the 5th (scaling) element */
626 memset ((char *) den, 0, sizeof den);
627
628 encode (num, lnum, hnum);
629 encode (den, lden, hden);
630
631 /* Special code for when the divisor < BASE. */
632 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
633 {
634 /* hnum != 0 already checked. */
635 for (i = 4 - 1; i >= 0; i--)
636 {
637 work = num[i] + carry * BASE;
638 quo[i] = work / lden;
639 carry = work % lden;
640 }
641 }
642 else
643 {
644 /* Full double precision division,
645 with thanks to Don Knuth's "Seminumerical Algorithms". */
646 int num_hi_sig, den_hi_sig;
647 unsigned HOST_WIDE_INT quo_est, scale;
648
649 /* Find the highest non-zero divisor digit. */
650 for (i = 4 - 1;; i--)
651 if (den[i] != 0)
652 {
653 den_hi_sig = i;
654 break;
655 }
656
657       /* Ensure that the first digit of the divisor is at least BASE/2.
658 This is required by the quotient digit estimation algorithm. */
659
660 scale = BASE / (den[den_hi_sig] + 1);
661 if (scale > 1)
662 { /* scale divisor and dividend */
663 carry = 0;
664 for (i = 0; i <= 4 - 1; i++)
665 {
666 work = (num[i] * scale) + carry;
667 num[i] = LOWPART (work);
668 carry = HIGHPART (work);
669 }
670
671 num[4] = carry;
672 carry = 0;
673 for (i = 0; i <= 4 - 1; i++)
674 {
675 work = (den[i] * scale) + carry;
676 den[i] = LOWPART (work);
677 carry = HIGHPART (work);
678 if (den[i] != 0) den_hi_sig = i;
679 }
680 }
681
682 num_hi_sig = 4;
683
684 /* Main loop */
685 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
686 {
687 /* Guess the next quotient digit, quo_est, by dividing the first
688 two remaining dividend digits by the high order quotient digit.
689 quo_est is never low and is at most 2 high. */
690 unsigned HOST_WIDE_INT tmp;
691
692 num_hi_sig = i + den_hi_sig + 1;
693 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
694 if (num[num_hi_sig] != den[den_hi_sig])
695 quo_est = work / den[den_hi_sig];
696 else
697 quo_est = BASE - 1;
698
699 /* Refine quo_est so it's usually correct, and at most one high. */
700 tmp = work - quo_est * den[den_hi_sig];
701 if (tmp < BASE
702 && (den[den_hi_sig - 1] * quo_est
703 > (tmp * BASE + num[num_hi_sig - 2])))
704 quo_est--;
705
706 /* Try QUO_EST as the quotient digit, by multiplying the
707 divisor by QUO_EST and subtracting from the remaining dividend.
708 Keep in mind that QUO_EST is the I - 1st digit. */
709
710 carry = 0;
711 for (j = 0; j <= den_hi_sig; j++)
712 {
713 work = quo_est * den[j] + carry;
714 carry = HIGHPART (work);
715 work = num[i + j] - LOWPART (work);
716 num[i + j] = LOWPART (work);
717 carry += HIGHPART (work) != 0;
718 }
719
720 /* If quo_est was high by one, then num[i] went negative and
721 we need to correct things. */
722 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
723 {
724 quo_est--;
725 carry = 0; /* add divisor back in */
726 for (j = 0; j <= den_hi_sig; j++)
727 {
728 work = num[i + j] + den[j] + carry;
729 carry = HIGHPART (work);
730 num[i + j] = LOWPART (work);
731 }
732
733 num [num_hi_sig] += carry;
734 }
735
736 /* Store the quotient digit. */
737 quo[i] = quo_est;
738 }
739 }
740
741 decode (quo, lquo, hquo);
742
743 finish_up:
744   /* if the quotient should be negative, negate it.  */
745 if (quo_neg)
746 neg_double (*lquo, *hquo, lquo, hquo);
747
748 /* compute trial remainder: rem = num - (quo * den) */
749 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
750 neg_double (*lrem, *hrem, lrem, hrem);
751 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
752
753 switch (code)
754 {
755 case TRUNC_DIV_EXPR:
756 case TRUNC_MOD_EXPR: /* round toward zero */
757 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
758 return overflow;
759
760 case FLOOR_DIV_EXPR:
761 case FLOOR_MOD_EXPR: /* round toward negative infinity */
762 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
763 {
764 /* quo = quo - 1; */
765 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
766 lquo, hquo);
767 }
768 else
769 return overflow;
770 break;
771
772 case CEIL_DIV_EXPR:
773 case CEIL_MOD_EXPR: /* round toward positive infinity */
774 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
775 {
776 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
777 lquo, hquo);
778 }
779 else
780 return overflow;
781 break;
782
783 case ROUND_DIV_EXPR:
784 case ROUND_MOD_EXPR: /* round to closest integer */
785 {
786 unsigned HOST_WIDE_INT labs_rem = *lrem;
787 HOST_WIDE_INT habs_rem = *hrem;
788 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
789 HOST_WIDE_INT habs_den = hden, htwice;
790
791 /* Get absolute values */
792 if (*hrem < 0)
793 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
794 if (hden < 0)
795 neg_double (lden, hden, &labs_den, &habs_den);
796
797 /* If (2 * abs (lrem) >= abs (lden)) */
798 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
799 labs_rem, habs_rem, &ltwice, &htwice);
800
801 if (((unsigned HOST_WIDE_INT) habs_den
802 < (unsigned HOST_WIDE_INT) htwice)
803 || (((unsigned HOST_WIDE_INT) habs_den
804 == (unsigned HOST_WIDE_INT) htwice)
805 && (labs_den < ltwice)))
806 {
807 if (*hquo < 0)
808 /* quo = quo - 1; */
809 add_double (*lquo, *hquo,
810 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
811 else
812 /* quo = quo + 1; */
813 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
814 lquo, hquo);
815 }
816 else
817 return overflow;
818 }
819 break;
820
821 default:
822 abort ();
823 }
824
825 /* compute true remainder: rem = num - (quo * den) */
826 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
827 neg_double (*lrem, *hrem, lrem, hrem);
828 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
829 return overflow;
830 }
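/* Illustrative sketch (not part of the original file): the four
   rounding modes above, shown on single-word signed operands.  The
   helper is hypothetical, not a GCC function, and assumes truncating
   (C99-style) native division.  */
#if 0 /* example only */
static HOST_WIDE_INT
div_rounded (HOST_WIDE_INT n, HOST_WIDE_INT d, enum tree_code code)
{
  HOST_WIDE_INT q = n / d;		/* truncates toward zero */
  HOST_WIDE_INT r = n % d;

  switch (code)
    {
    case TRUNC_DIV_EXPR:
      return q;
    case FLOOR_DIV_EXPR:		/* round toward negative infinity */
      return (r != 0 && (r < 0) != (d < 0)) ? q - 1 : q;
    case CEIL_DIV_EXPR:			/* round toward positive infinity */
      return (r != 0 && (r < 0) == (d < 0)) ? q + 1 : q;
    case ROUND_DIV_EXPR:		/* round to closest, ties away from 0 */
      if (2 * (r < 0 ? -r : r) >= (d < 0 ? -d : d))
	return q + ((n < 0) != (d < 0) ? -1 : 1);
      return q;
    default:
      abort ();
    }
}
/* div_rounded (-7, 2, FLOOR_DIV_EXPR) == -4,
   div_rounded (-7, 2, CEIL_DIV_EXPR)  == -3,
   div_rounded (-7, 2, ROUND_DIV_EXPR) == -4 (2 * |r| == 2 >= |d|).  */
#endif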
831 \f
832 /* Given T, an expression, return the negation of T. Allow for T to be
833 null, in which case return null. */
834
835 static tree
836 negate_expr (t)
837 tree t;
838 {
839 tree type;
840 tree tem;
841
842 if (t == 0)
843 return 0;
844
845 type = TREE_TYPE (t);
846 STRIP_SIGN_NOPS (t);
847
848 switch (TREE_CODE (t))
849 {
850 case INTEGER_CST:
851 case REAL_CST:
852 if (! TREE_UNSIGNED (type)
853 && 0 != (tem = fold (build1 (NEGATE_EXPR, type, t)))
854 && ! TREE_OVERFLOW (tem))
855 return tem;
856 break;
857
858 case NEGATE_EXPR:
859 return convert (type, TREE_OPERAND (t, 0));
860
861 case MINUS_EXPR:
862 /* - (A - B) -> B - A */
863 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
864 return convert (type,
865 fold (build (MINUS_EXPR, TREE_TYPE (t),
866 TREE_OPERAND (t, 1),
867 TREE_OPERAND (t, 0))));
868 break;
869
870 default:
871 break;
872 }
873
874 return convert (type, fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t)));
875 }
876 \f
877 /* Split a tree IN into a constant, literal and variable parts that could be
878 combined with CODE to make IN. "constant" means an expression with
879 TREE_CONSTANT but that isn't an actual constant. CODE must be a
880 commutative arithmetic operation. Store the constant part into *CONP,
881 the literal in *LITP and return the variable part. If a part isn't
882 present, set it to null. If the tree does not decompose in this way,
883 return the entire tree as the variable part and the other parts as null.
884
885    If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
886    case, we negate an operand that was subtracted, except if it is a
887    literal, in which case we use *MINUS_LITP instead.
888
889 If NEGATE_P is true, we are negating all of IN, again except a literal
890 for which we use *MINUS_LITP instead.
891
892 If IN is itself a literal or constant, return it as appropriate.
893
894 Note that we do not guarantee that any of the three values will be the
895 same type as IN, but they will have the same signedness and mode. */
896
897 static tree
898 split_tree (in, code, conp, litp, minus_litp, negate_p)
899 tree in;
900 enum tree_code code;
901 tree *conp, *litp, *minus_litp;
902 int negate_p;
903 {
904 tree var = 0;
905
906 *conp = 0;
907 *litp = 0;
908 *minus_litp = 0;
909
910 /* Strip any conversions that don't change the machine mode or signedness. */
911 STRIP_SIGN_NOPS (in);
912
913 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
914 *litp = in;
915 else if (TREE_CODE (in) == code
916 || (! FLOAT_TYPE_P (TREE_TYPE (in))
917 /* We can associate addition and subtraction together (even
918 though the C standard doesn't say so) for integers because
919 the value is not affected. For reals, the value might be
920 affected, so we can't. */
921 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
922 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
923 {
924 tree op0 = TREE_OPERAND (in, 0);
925 tree op1 = TREE_OPERAND (in, 1);
926 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
927 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
928
929 /* First see if either of the operands is a literal, then a constant. */
930 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
931 *litp = op0, op0 = 0;
932 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
933 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
934
935 if (op0 != 0 && TREE_CONSTANT (op0))
936 *conp = op0, op0 = 0;
937 else if (op1 != 0 && TREE_CONSTANT (op1))
938 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
939
940 /* If we haven't dealt with either operand, this is not a case we can
941 decompose. Otherwise, VAR is either of the ones remaining, if any. */
942 if (op0 != 0 && op1 != 0)
943 var = in;
944 else if (op0 != 0)
945 var = op0;
946 else
947 var = op1, neg_var_p = neg1_p;
948
949 /* Now do any needed negations. */
950 if (neg_litp_p)
951 *minus_litp = *litp, *litp = 0;
952 if (neg_conp_p)
953 *conp = negate_expr (*conp);
954 if (neg_var_p)
955 var = negate_expr (var);
956 }
957 else if (TREE_CONSTANT (in))
958 *conp = in;
959 else
960 var = in;
961
962 if (negate_p)
963 {
964 if (*litp)
965 *minus_litp = *litp, *litp = 0;
966 else if (*minus_litp)
967 *litp = *minus_litp, *minus_litp = 0;
968 *conp = negate_expr (*conp);
969 var = negate_expr (var);
970 }
971
972 return var;
973 }
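/* Worked example (illustrative): with CODE == PLUS_EXPR, splitting
   IN = x + 3 yields *LITP = 3 and returns x, while splitting
   IN = x - 3 yields *MINUS_LITP = 3 instead, because the literal was
   subtracted.  A non-literal TREE_CONSTANT operand would go to *CONP.  */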
974
975 /* Re-associate trees split by the above function. T1 and T2 are either
976 expressions to associate or null. Return the new expression, if any. If
977 we build an operation, do it in TYPE and with CODE. */
978
979 static tree
980 associate_trees (t1, t2, code, type)
981 tree t1, t2;
982 enum tree_code code;
983 tree type;
984 {
985 if (t1 == 0)
986 return t2;
987 else if (t2 == 0)
988 return t1;
989
990 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
991 try to fold this since we will have infinite recursion. But do
992 deal with any NEGATE_EXPRs. */
993 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
994 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
995 {
996 if (TREE_CODE (t1) == NEGATE_EXPR)
997 return build (MINUS_EXPR, type, convert (type, t2),
998 convert (type, TREE_OPERAND (t1, 0)));
999 else if (TREE_CODE (t2) == NEGATE_EXPR)
1000 return build (MINUS_EXPR, type, convert (type, t1),
1001 convert (type, TREE_OPERAND (t2, 0)));
1002 else
1003 return build (code, type, convert (type, t1), convert (type, t2));
1004 }
1005
1006 return fold (build (code, type, convert (type, t1), convert (type, t2)));
1007 }
1008 \f
1009 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1010 to produce a new constant.
1011
1012 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1013
1014 static tree
1015 int_const_binop (code, arg1, arg2, notrunc)
1016 enum tree_code code;
1017 tree arg1, arg2;
1018 int notrunc;
1019 {
1020 unsigned HOST_WIDE_INT int1l, int2l;
1021 HOST_WIDE_INT int1h, int2h;
1022 unsigned HOST_WIDE_INT low;
1023 HOST_WIDE_INT hi;
1024 unsigned HOST_WIDE_INT garbagel;
1025 HOST_WIDE_INT garbageh;
1026 tree t;
1027 tree type = TREE_TYPE (arg1);
1028 int uns = TREE_UNSIGNED (type);
1029 int is_sizetype
1030 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1031 int overflow = 0;
1032 int no_overflow = 0;
1033
1034 int1l = TREE_INT_CST_LOW (arg1);
1035 int1h = TREE_INT_CST_HIGH (arg1);
1036 int2l = TREE_INT_CST_LOW (arg2);
1037 int2h = TREE_INT_CST_HIGH (arg2);
1038
1039 switch (code)
1040 {
1041 case BIT_IOR_EXPR:
1042 low = int1l | int2l, hi = int1h | int2h;
1043 break;
1044
1045 case BIT_XOR_EXPR:
1046 low = int1l ^ int2l, hi = int1h ^ int2h;
1047 break;
1048
1049 case BIT_AND_EXPR:
1050 low = int1l & int2l, hi = int1h & int2h;
1051 break;
1052
1053 case BIT_ANDTC_EXPR:
1054 low = int1l & ~int2l, hi = int1h & ~int2h;
1055 break;
1056
1057 case RSHIFT_EXPR:
1058 int2l = -int2l;
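      /* ... fall through ... */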
1059 case LSHIFT_EXPR:
1060 /* It's unclear from the C standard whether shifts can overflow.
1061 The following code ignores overflow; perhaps a C standard
1062 interpretation ruling is needed. */
1063 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1064 &low, &hi, !uns);
1065 no_overflow = 1;
1066 break;
1067
1068 case RROTATE_EXPR:
1069 int2l = - int2l;
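      /* ... fall through ... */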
1070 case LROTATE_EXPR:
1071 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1072 &low, &hi);
1073 break;
1074
1075 case PLUS_EXPR:
1076 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1077 break;
1078
1079 case MINUS_EXPR:
1080 neg_double (int2l, int2h, &low, &hi);
1081 add_double (int1l, int1h, low, hi, &low, &hi);
1082 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1083 break;
1084
1085 case MULT_EXPR:
1086 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1087 break;
1088
1089 case TRUNC_DIV_EXPR:
1090 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1091 case EXACT_DIV_EXPR:
1092 /* This is a shortcut for a common special case. */
1093 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1094 && ! TREE_CONSTANT_OVERFLOW (arg1)
1095 && ! TREE_CONSTANT_OVERFLOW (arg2)
1096 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1097 {
1098 if (code == CEIL_DIV_EXPR)
1099 int1l += int2l - 1;
1100
1101 low = int1l / int2l, hi = 0;
1102 break;
1103 }
1104
1105 /* ... fall through ... */
1106
1107 case ROUND_DIV_EXPR:
1108 if (int2h == 0 && int2l == 1)
1109 {
1110 low = int1l, hi = int1h;
1111 break;
1112 }
1113 if (int1l == int2l && int1h == int2h
1114 && ! (int1l == 0 && int1h == 0))
1115 {
1116 low = 1, hi = 0;
1117 break;
1118 }
1119 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1120 &low, &hi, &garbagel, &garbageh);
1121 break;
1122
1123 case TRUNC_MOD_EXPR:
1124 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1125 /* This is a shortcut for a common special case. */
1126 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1127 && ! TREE_CONSTANT_OVERFLOW (arg1)
1128 && ! TREE_CONSTANT_OVERFLOW (arg2)
1129 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1130 {
1131 if (code == CEIL_MOD_EXPR)
1132 int1l += int2l - 1;
1133 low = int1l % int2l, hi = 0;
1134 break;
1135 }
1136
1137 /* ... fall through ... */
1138
1139 case ROUND_MOD_EXPR:
1140 overflow = div_and_round_double (code, uns,
1141 int1l, int1h, int2l, int2h,
1142 &garbagel, &garbageh, &low, &hi);
1143 break;
1144
1145 case MIN_EXPR:
1146 case MAX_EXPR:
1147 if (uns)
1148 low = (((unsigned HOST_WIDE_INT) int1h
1149 < (unsigned HOST_WIDE_INT) int2h)
1150 || (((unsigned HOST_WIDE_INT) int1h
1151 == (unsigned HOST_WIDE_INT) int2h)
1152 && int1l < int2l));
1153 else
1154 low = (int1h < int2h
1155 || (int1h == int2h && int1l < int2l));
1156
1157 if (low == (code == MIN_EXPR))
1158 low = int1l, hi = int1h;
1159 else
1160 low = int2l, hi = int2h;
1161 break;
1162
1163 default:
1164 abort ();
1165 }
1166
1167   /* If this is for a sizetype, the value can be represented as one (signed)
1168      HOST_WIDE_INT word, and it doesn't overflow, use size_int_type_wide
1169      since it caches constants. */
1170 if (is_sizetype
1171 && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
1172 || (hi == -1 && (HOST_WIDE_INT) low < 0))
1173 && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
1174 return size_int_type_wide (low, type);
1175 else
1176 {
1177 t = build_int_2 (low, hi);
1178 TREE_TYPE (t) = TREE_TYPE (arg1);
1179 }
1180
1181 TREE_OVERFLOW (t)
1182 = ((notrunc
1183 ? (!uns || is_sizetype) && overflow
1184 : (force_fit_type (t, (!uns || is_sizetype) && overflow)
1185 && ! no_overflow))
1186 | TREE_OVERFLOW (arg1)
1187 | TREE_OVERFLOW (arg2));
1188
1189 /* If we're doing a size calculation, unsigned arithmetic does overflow.
1190 So check if force_fit_type truncated the value. */
1191 if (is_sizetype
1192 && ! TREE_OVERFLOW (t)
1193 && (TREE_INT_CST_HIGH (t) != hi
1194 || TREE_INT_CST_LOW (t) != low))
1195 TREE_OVERFLOW (t) = 1;
1196
1197 TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
1198 | TREE_CONSTANT_OVERFLOW (arg1)
1199 | TREE_CONSTANT_OVERFLOW (arg2));
1200 return t;
1201 }
1202
1203 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1204 constant. We assume ARG1 and ARG2 have the same data type, or at least
1205 are the same kind of constant and the same machine mode.
1206
1207 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1208
1209 static tree
1210 const_binop (code, arg1, arg2, notrunc)
1211 enum tree_code code;
1212 tree arg1, arg2;
1213 int notrunc;
1214 {
1215 STRIP_NOPS (arg1);
1216 STRIP_NOPS (arg2);
1217
1218 if (TREE_CODE (arg1) == INTEGER_CST)
1219 return int_const_binop (code, arg1, arg2, notrunc);
1220
1221 if (TREE_CODE (arg1) == REAL_CST)
1222 {
1223 REAL_VALUE_TYPE d1;
1224 REAL_VALUE_TYPE d2;
1225 REAL_VALUE_TYPE value;
1226 tree t;
1227
1228 d1 = TREE_REAL_CST (arg1);
1229 d2 = TREE_REAL_CST (arg2);
1230
1231 /* If either operand is a NaN, just return it. Otherwise, set up
1232 for floating-point trap; we return an overflow. */
1233 if (REAL_VALUE_ISNAN (d1))
1234 return arg1;
1235 else if (REAL_VALUE_ISNAN (d2))
1236 return arg2;
1237
1238 REAL_ARITHMETIC (value, code, d1, d2);
1239
1240 t = build_real (TREE_TYPE (arg1),
1241 real_value_truncate (TYPE_MODE (TREE_TYPE (arg1)),
1242 value));
1243
1244 TREE_OVERFLOW (t)
1245 = (force_fit_type (t, 0)
1246 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1247 TREE_CONSTANT_OVERFLOW (t)
1248 = TREE_OVERFLOW (t)
1249 | TREE_CONSTANT_OVERFLOW (arg1)
1250 | TREE_CONSTANT_OVERFLOW (arg2);
1251 return t;
1252 }
1253 if (TREE_CODE (arg1) == COMPLEX_CST)
1254 {
1255 tree type = TREE_TYPE (arg1);
1256 tree r1 = TREE_REALPART (arg1);
1257 tree i1 = TREE_IMAGPART (arg1);
1258 tree r2 = TREE_REALPART (arg2);
1259 tree i2 = TREE_IMAGPART (arg2);
1260 tree t;
1261
1262 switch (code)
1263 {
1264 case PLUS_EXPR:
1265 t = build_complex (type,
1266 const_binop (PLUS_EXPR, r1, r2, notrunc),
1267 const_binop (PLUS_EXPR, i1, i2, notrunc));
1268 break;
1269
1270 case MINUS_EXPR:
1271 t = build_complex (type,
1272 const_binop (MINUS_EXPR, r1, r2, notrunc),
1273 const_binop (MINUS_EXPR, i1, i2, notrunc));
1274 break;
1275
1276 case MULT_EXPR:
1277 t = build_complex (type,
1278 const_binop (MINUS_EXPR,
1279 const_binop (MULT_EXPR,
1280 r1, r2, notrunc),
1281 const_binop (MULT_EXPR,
1282 i1, i2, notrunc),
1283 notrunc),
1284 const_binop (PLUS_EXPR,
1285 const_binop (MULT_EXPR,
1286 r1, i2, notrunc),
1287 const_binop (MULT_EXPR,
1288 i1, r2, notrunc),
1289 notrunc));
1290 break;
1291
1292 case RDIV_EXPR:
1293 {
1294 tree magsquared
1295 = const_binop (PLUS_EXPR,
1296 const_binop (MULT_EXPR, r2, r2, notrunc),
1297 const_binop (MULT_EXPR, i2, i2, notrunc),
1298 notrunc);
1299
1300 t = build_complex (type,
1301 const_binop
1302 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1303 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1304 const_binop (PLUS_EXPR,
1305 const_binop (MULT_EXPR, r1, r2,
1306 notrunc),
1307 const_binop (MULT_EXPR, i1, i2,
1308 notrunc),
1309 notrunc),
1310 magsquared, notrunc),
1311 const_binop
1312 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1313 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1314 const_binop (MINUS_EXPR,
1315 const_binop (MULT_EXPR, i1, r2,
1316 notrunc),
1317 const_binop (MULT_EXPR, r1, i2,
1318 notrunc),
1319 notrunc),
1320 magsquared, notrunc));
1321 }
1322 break;
1323
1324 default:
1325 abort ();
1326 }
1327 return t;
1328 }
1329 return 0;
1330 }
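/* Worked example (illustrative): the COMPLEX_CST cases above implement
   the usual formulas
     (r1 + i1*i) * (r2 + i2*i) = (r1*r2 - i1*i2) + (r1*i2 + i1*r2)*i
     (r1 + i1*i) / (r2 + i2*i) = ((r1*r2 + i1*i2) + (i1*r2 - r1*i2)*i)
				 / (r2*r2 + i2*i2).  */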
1331
1332 /* These are the hash table functions for the hash table of INTEGER_CST
1333 nodes of a sizetype. */
1334
1335 /* Return the hash code for X, an INTEGER_CST. */
1336
1337 static hashval_t
1338 size_htab_hash (x)
1339 const void *x;
1340 {
1341 tree t = (tree) x;
1342
1343 return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
1344 ^ (hashval_t) ((long) TREE_TYPE (t) >> 3)
1345 ^ (TREE_OVERFLOW (t) << 20));
1346 }
1347
1348 /* Return non-zero if the value represented by *X (an INTEGER_CST tree node)
1349    is the same as that given by *Y, which is also an INTEGER_CST tree node. */
1350
1351 static int
1352 size_htab_eq (x, y)
1353 const void *x;
1354 const void *y;
1355 {
1356 tree xt = (tree) x;
1357 tree yt = (tree) y;
1358
1359 return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
1360 && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
1361 && TREE_TYPE (xt) == TREE_TYPE (yt)
1362 && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
1363 }
1364 \f
1365 /* Return an INTEGER_CST whose low-order HOST_BITS_PER_WIDE_INT bits are
1366    given by NUMBER, and whose type is the sizetype represented by KIND. */
1367
1368 tree
1369 size_int_wide (number, kind)
1370 HOST_WIDE_INT number;
1371 enum size_type_kind kind;
1372 {
1373 return size_int_type_wide (number, sizetype_tab[(int) kind]);
1374 }
1375
1376 /* Likewise, but the desired type is specified explicitly. */
1377
1378 tree
1379 size_int_type_wide (number, type)
1380 HOST_WIDE_INT number;
1381 tree type;
1382 {
1383 static htab_t size_htab = 0;
1384 static tree new_const = 0;
1385 PTR *slot;
1386
1387 if (size_htab == 0)
1388 {
1389 size_htab = htab_create (1024, size_htab_hash, size_htab_eq, NULL);
1390 ggc_add_deletable_htab (size_htab, NULL, NULL);
1391 new_const = make_node (INTEGER_CST);
1392 ggc_add_tree_root (&new_const, 1);
1393 }
1394
1395 /* Adjust NEW_CONST to be the constant we want. If it's already in the
1396 hash table, we return the value from the hash table. Otherwise, we
1397 place that in the hash table and make a new node for the next time. */
1398 TREE_INT_CST_LOW (new_const) = number;
1399 TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
1400 TREE_TYPE (new_const) = type;
1401 TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
1402 = force_fit_type (new_const, 0);
1403
1404 slot = htab_find_slot (size_htab, new_const, INSERT);
1405 if (*slot == 0)
1406 {
1407 tree t = new_const;
1408
1409 *slot = (PTR) new_const;
1410 new_const = make_node (INTEGER_CST);
1411 return t;
1412 }
1413 else
1414 return (tree) *slot;
1415 }
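/* Usage sketch (illustrative, not part of the original file): because
   the nodes are hash-consed, repeated requests for the same constant
   return the identical tree, so pointer comparison suffices.  */
#if 0 /* example only */
static void
size_cache_example (void)
{
  tree a = size_int_type_wide (8, sizetype);
  tree b = size_int_type_wide (8, sizetype);
  /* a == b: both point to the single INTEGER_CST stored in size_htab.  */
}
#endif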
1416
1417 /* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
1418    is a tree code.  The type of the result is taken from the operands.
1419    Both must be the same integer type, and it must be a sizetype.
1420    If the operands are constant, so is the result. */
1421
1422 tree
1423 size_binop (code, arg0, arg1)
1424 enum tree_code code;
1425 tree arg0, arg1;
1426 {
1427 tree type = TREE_TYPE (arg0);
1428
1429 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1430 || type != TREE_TYPE (arg1))
1431 abort ();
1432
1433 /* Handle the special case of two integer constants faster. */
1434 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1435 {
1436 /* And some specific cases even faster than that. */
1437 if (code == PLUS_EXPR && integer_zerop (arg0))
1438 return arg1;
1439 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1440 && integer_zerop (arg1))
1441 return arg0;
1442 else if (code == MULT_EXPR && integer_onep (arg0))
1443 return arg1;
1444
1445 /* Handle general case of two integer constants. */
1446 return int_const_binop (code, arg0, arg1, 0);
1447 }
1448
1449 if (arg0 == error_mark_node || arg1 == error_mark_node)
1450 return error_mark_node;
1451
1452 return fold (build (code, type, arg0, arg1));
1453 }
1454
1455 /* Given two values, either both of sizetype or both of bitsizetype,
1456 compute the difference between the two values. Return the value
1457    in the signed type corresponding to the type of the operands. */
1458
1459 tree
1460 size_diffop (arg0, arg1)
1461 tree arg0, arg1;
1462 {
1463 tree type = TREE_TYPE (arg0);
1464 tree ctype;
1465
1466 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1467 || type != TREE_TYPE (arg1))
1468 abort ();
1469
1470 /* If the type is already signed, just do the simple thing. */
1471 if (! TREE_UNSIGNED (type))
1472 return size_binop (MINUS_EXPR, arg0, arg1);
1473
1474 ctype = (type == bitsizetype || type == ubitsizetype
1475 ? sbitsizetype : ssizetype);
1476
1477 /* If either operand is not a constant, do the conversions to the signed
1478 type and subtract. The hardware will do the right thing with any
1479 overflow in the subtraction. */
1480 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1481 return size_binop (MINUS_EXPR, convert (ctype, arg0),
1482 convert (ctype, arg1));
1483
1484 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1485 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1486 overflow) and negate (which can't either). Special-case a result
1487 of zero while we're here. */
1488 if (tree_int_cst_equal (arg0, arg1))
1489 return convert (ctype, integer_zero_node);
1490 else if (tree_int_cst_lt (arg1, arg0))
1491 return convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1492 else
1493 return size_binop (MINUS_EXPR, convert (ctype, integer_zero_node),
1494 convert (ctype, size_binop (MINUS_EXPR, arg1, arg0)));
1495 }
1496 \f
1497
1498 /* Given T, a tree representing a type conversion of ARG1, a constant,
1499    return a constant tree representing the result of the conversion. */
1500
1501 static tree
1502 fold_convert (t, arg1)
1503 tree t;
1504 tree arg1;
1505 {
1506 tree type = TREE_TYPE (t);
1507 int overflow = 0;
1508
1509 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1510 {
1511 if (TREE_CODE (arg1) == INTEGER_CST)
1512 {
1513 /* If we would build a constant wider than GCC supports,
1514 leave the conversion unfolded. */
1515 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
1516 return t;
1517
1518 /* If we are trying to make a sizetype for a small integer, use
1519 size_int to pick up cached types to reduce duplicate nodes. */
1520 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1521 && !TREE_CONSTANT_OVERFLOW (arg1)
1522 && compare_tree_int (arg1, 10000) < 0)
1523 return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);
1524
1525 /* Given an integer constant, make new constant with new type,
1526 appropriately sign-extended or truncated. */
1527 t = build_int_2 (TREE_INT_CST_LOW (arg1),
1528 TREE_INT_CST_HIGH (arg1));
1529 TREE_TYPE (t) = type;
1530 /* Indicate an overflow if (1) ARG1 already overflowed,
1531 or (2) force_fit_type indicates an overflow.
1532 Tell force_fit_type that an overflow has already occurred
1533 if ARG1 is a too-large unsigned value and T is signed.
1534 But don't indicate an overflow if converting a pointer. */
1535 TREE_OVERFLOW (t)
1536 = ((force_fit_type (t,
1537 (TREE_INT_CST_HIGH (arg1) < 0
1538 && (TREE_UNSIGNED (type)
1539 < TREE_UNSIGNED (TREE_TYPE (arg1)))))
1540 && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
1541 || TREE_OVERFLOW (arg1));
1542 TREE_CONSTANT_OVERFLOW (t)
1543 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1544 }
1545 else if (TREE_CODE (arg1) == REAL_CST)
1546 {
1547 /* Don't initialize these, use assignments.
1548 Initialized local aggregates don't work on old compilers. */
1549 REAL_VALUE_TYPE x;
1550 REAL_VALUE_TYPE l;
1551 REAL_VALUE_TYPE u;
1552 tree type1 = TREE_TYPE (arg1);
1553 int no_upper_bound;
1554
1555 x = TREE_REAL_CST (arg1);
1556 l = real_value_from_int_cst (type1, TYPE_MIN_VALUE (type));
1557
1558 no_upper_bound = (TYPE_MAX_VALUE (type) == NULL);
1559 if (!no_upper_bound)
1560 u = real_value_from_int_cst (type1, TYPE_MAX_VALUE (type));
1561
1562 /* See if X will be in range after truncation towards 0.
1563 To compensate for truncation, move the bounds away from 0,
1564 but reject if X exactly equals the adjusted bounds. */
1565 REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
1566 if (!no_upper_bound)
1567 REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
1568 /* If X is a NaN, use zero instead and show we have an overflow.
1569 Otherwise, range check. */
1570 if (REAL_VALUE_ISNAN (x))
1571 overflow = 1, x = dconst0;
1572 else if (! (REAL_VALUES_LESS (l, x)
1573 && !no_upper_bound
1574 && REAL_VALUES_LESS (x, u)))
1575 overflow = 1;
1576
1577 {
1578 HOST_WIDE_INT low, high;
1579 REAL_VALUE_TO_INT (&low, &high, x);
1580 t = build_int_2 (low, high);
1581 }
1582 TREE_TYPE (t) = type;
1583 TREE_OVERFLOW (t)
1584 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1585 TREE_CONSTANT_OVERFLOW (t)
1586 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1587 }
1588 TREE_TYPE (t) = type;
1589 }
1590 else if (TREE_CODE (type) == REAL_TYPE)
1591 {
1592 if (TREE_CODE (arg1) == INTEGER_CST)
1593 return build_real_from_int_cst (type, arg1);
1594 if (TREE_CODE (arg1) == REAL_CST)
1595 {
1596 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1597 {
1598 t = arg1;
1599 TREE_TYPE (arg1) = type;
1600 return t;
1601 }
1602
1603 t = build_real (type,
1604 real_value_truncate (TYPE_MODE (type),
1605 TREE_REAL_CST (arg1)));
1606
1607 TREE_OVERFLOW (t)
1608 = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
1609 TREE_CONSTANT_OVERFLOW (t)
1610 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1611 return t;
1612 }
1613 }
1614 TREE_CONSTANT (t) = 1;
1615 return t;
1616 }
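/* Worked example (illustrative): converting the REAL_CST 1.9 to an
   integer type truncates toward zero and yields 1; converting a NaN
   yields 0 with TREE_OVERFLOW set, per the range check above.  */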
1617 \f
1618 /* Return an expr equal to X but certainly not valid as an lvalue. */
1619
1620 tree
1621 non_lvalue (x)
1622 tree x;
1623 {
1624 tree result;
1625
1626 /* These things are certainly not lvalues. */
1627 if (TREE_CODE (x) == NON_LVALUE_EXPR
1628 || TREE_CODE (x) == INTEGER_CST
1629 || TREE_CODE (x) == REAL_CST
1630 || TREE_CODE (x) == STRING_CST
1631 || TREE_CODE (x) == ADDR_EXPR)
1632 return x;
1633
1634 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
1635 TREE_CONSTANT (result) = TREE_CONSTANT (x);
1636 return result;
1637 }
1638
1639 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
1640 Zero means allow extended lvalues. */
1641
1642 int pedantic_lvalues;
1643
1644 /* When pedantic, return an expr equal to X but certainly not valid as a
1645 pedantic lvalue. Otherwise, return X. */
1646
1647 tree
1648 pedantic_non_lvalue (x)
1649 tree x;
1650 {
1651 if (pedantic_lvalues)
1652 return non_lvalue (x);
1653 else
1654 return x;
1655 }
1656 \f
1657 /* Given a tree comparison code, return the code that is the logical inverse
1658 of the given code. It is not safe to do this for floating-point
1659 comparisons, except for NE_EXPR and EQ_EXPR. */
1660
1661 static enum tree_code
1662 invert_tree_comparison (code)
1663 enum tree_code code;
1664 {
1665 switch (code)
1666 {
1667 case EQ_EXPR:
1668 return NE_EXPR;
1669 case NE_EXPR:
1670 return EQ_EXPR;
1671 case GT_EXPR:
1672 return LE_EXPR;
1673 case GE_EXPR:
1674 return LT_EXPR;
1675 case LT_EXPR:
1676 return GE_EXPR;
1677 case LE_EXPR:
1678 return GT_EXPR;
1679 default:
1680 abort ();
1681 }
1682 }
1683
1684 /* Similar, but return the comparison that results if the operands are
1685 swapped. This is safe for floating-point. */
1686
1687 static enum tree_code
1688 swap_tree_comparison (code)
1689 enum tree_code code;
1690 {
1691 switch (code)
1692 {
1693 case EQ_EXPR:
1694 case NE_EXPR:
1695 return code;
1696 case GT_EXPR:
1697 return LT_EXPR;
1698 case GE_EXPR:
1699 return LE_EXPR;
1700 case LT_EXPR:
1701 return GT_EXPR;
1702 case LE_EXPR:
1703 return GE_EXPR;
1704 default:
1705 abort ();
1706 }
1707 }
1708
1709 /* Return nonzero if CODE is a tree code that represents a truth value. */
1710
1711 static int
1712 truth_value_p (code)
1713 enum tree_code code;
1714 {
1715 return (TREE_CODE_CLASS (code) == '<'
1716 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
1717 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
1718 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
1719 }
1720 \f
1721 /* Return nonzero if two operands are necessarily equal.
1722 If ONLY_CONST is non-zero, only return non-zero for constants.
1723 This function tests whether the operands are indistinguishable;
1724 it does not test whether they are equal using C's == operation.
1725 The distinction is important for IEEE floating point, because
1726 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
1727 (2) two NaNs may be indistinguishable, but NaN!=NaN. */
1728
1729 int
1730 operand_equal_p (arg0, arg1, only_const)
1731 tree arg0, arg1;
1732 int only_const;
1733 {
1734   /* If the two types don't have the same signedness, then we can't consider
1735 them equal. We must check this before the STRIP_NOPS calls
1736 because they may change the signedness of the arguments. */
1737 if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
1738 return 0;
1739
1740 STRIP_NOPS (arg0);
1741 STRIP_NOPS (arg1);
1742
1743 if (TREE_CODE (arg0) != TREE_CODE (arg1)
1744 /* This is needed for conversions and for COMPONENT_REF.
1745 Might as well play it safe and always test this. */
1746 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
1747 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
1748 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
1749 return 0;
1750
1751 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
1752 We don't care about side effects in that case because the SAVE_EXPR
1753 takes care of that for us. In all other cases, two expressions are
1754 equal if they have no side effects. If we have two identical
1755 expressions with side effects that should be treated the same due
1756 to the only side effects being identical SAVE_EXPR's, that will
1757 be detected in the recursive calls below. */
1758 if (arg0 == arg1 && ! only_const
1759 && (TREE_CODE (arg0) == SAVE_EXPR
1760 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
1761 return 1;
1762
1763 /* Next handle constant cases, those for which we can return 1 even
1764 if ONLY_CONST is set. */
1765 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
1766 switch (TREE_CODE (arg0))
1767 {
1768 case INTEGER_CST:
1769 return (! TREE_CONSTANT_OVERFLOW (arg0)
1770 && ! TREE_CONSTANT_OVERFLOW (arg1)
1771 && tree_int_cst_equal (arg0, arg1));
1772
1773 case REAL_CST:
1774 return (! TREE_CONSTANT_OVERFLOW (arg0)
1775 && ! TREE_CONSTANT_OVERFLOW (arg1)
1776 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
1777 TREE_REAL_CST (arg1)));
1778
1779 case VECTOR_CST:
1780 {
1781 tree v1, v2;
1782
1783 if (TREE_CONSTANT_OVERFLOW (arg0)
1784 || TREE_CONSTANT_OVERFLOW (arg1))
1785 return 0;
1786
1787 v1 = TREE_VECTOR_CST_ELTS (arg0);
1788 v2 = TREE_VECTOR_CST_ELTS (arg1);
1789 while (v1 && v2)
1790 {
1791 if (!operand_equal_p (v1, v2, only_const))
1792 return 0;
1793 v1 = TREE_CHAIN (v1);
1794 v2 = TREE_CHAIN (v2);
1795 }
1796
1797 return 1;
1798 }
1799
1800 case COMPLEX_CST:
1801 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
1802 only_const)
1803 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
1804 only_const));
1805
1806 case STRING_CST:
1807 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
1808 && ! memcmp (TREE_STRING_POINTER (arg0),
1809 TREE_STRING_POINTER (arg1),
1810 TREE_STRING_LENGTH (arg0)));
1811
1812 case ADDR_EXPR:
1813 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
1814 0);
1815 default:
1816 break;
1817 }
1818
1819 if (only_const)
1820 return 0;
1821
1822 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
1823 {
1824 case '1':
1825 /* Two conversions are equal only if signedness and modes match. */
1826 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
1827 && (TREE_UNSIGNED (TREE_TYPE (arg0))
1828 != TREE_UNSIGNED (TREE_TYPE (arg1))))
1829 return 0;
1830
1831 return operand_equal_p (TREE_OPERAND (arg0, 0),
1832 TREE_OPERAND (arg1, 0), 0);
1833
1834 case '<':
1835 case '2':
1836 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
1837 && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
1838 0))
1839 return 1;
1840
1841 /* For commutative ops, allow the other order. */
1842 return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
1843 || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
1844 || TREE_CODE (arg0) == BIT_IOR_EXPR
1845 || TREE_CODE (arg0) == BIT_XOR_EXPR
1846 || TREE_CODE (arg0) == BIT_AND_EXPR
1847 || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
1848 && operand_equal_p (TREE_OPERAND (arg0, 0),
1849 TREE_OPERAND (arg1, 1), 0)
1850 && operand_equal_p (TREE_OPERAND (arg0, 1),
1851 TREE_OPERAND (arg1, 0), 0));
1852
1853 case 'r':
1854 /* If either of the pointer (or reference) expressions we are dereferencing
1855 contains a side effect, these cannot be equal. */
1856 if (TREE_SIDE_EFFECTS (arg0)
1857 || TREE_SIDE_EFFECTS (arg1))
1858 return 0;
1859
1860 switch (TREE_CODE (arg0))
1861 {
1862 case INDIRECT_REF:
1863 return operand_equal_p (TREE_OPERAND (arg0, 0),
1864 TREE_OPERAND (arg1, 0), 0);
1865
1866 case COMPONENT_REF:
1867 case ARRAY_REF:
1868 case ARRAY_RANGE_REF:
1869 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1870 TREE_OPERAND (arg1, 0), 0)
1871 && operand_equal_p (TREE_OPERAND (arg0, 1),
1872 TREE_OPERAND (arg1, 1), 0));
1873
1874 case BIT_FIELD_REF:
1875 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1876 TREE_OPERAND (arg1, 0), 0)
1877 && operand_equal_p (TREE_OPERAND (arg0, 1),
1878 TREE_OPERAND (arg1, 1), 0)
1879 && operand_equal_p (TREE_OPERAND (arg0, 2),
1880 TREE_OPERAND (arg1, 2), 0));
1881 default:
1882 return 0;
1883 }
1884
1885 case 'e':
1886 if (TREE_CODE (arg0) == RTL_EXPR)
1887 return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
1888 return 0;
1889
1890 default:
1891 return 0;
1892 }
1893 }
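
/* An illustrative note on the cases above: `a + b' and `b + a' compare
   equal through the commutative-operator check, and `*p' compares equal
   to another `*p' when P is side-effect free, but an expression with
   side effects such as `f () + 1' never compares equal -- not even to
   an identical copy of itself -- unless its only side effects are
   shared SAVE_EXPRs.  */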
1894 \f
1895 /* Similar to operand_equal_p, but see if ARG0 might have been made by
1896 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
1897
1898 When in doubt, return 0. */
1899
1900 static int
1901 operand_equal_for_comparison_p (arg0, arg1, other)
1902 tree arg0, arg1;
1903 tree other;
1904 {
1905 int unsignedp1, unsignedpo;
1906 tree primarg0, primarg1, primother;
1907 unsigned int correct_width;
1908
1909 if (operand_equal_p (arg0, arg1, 0))
1910 return 1;
1911
1912 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
1913 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
1914 return 0;
1915
1916 /* Discard any conversions that don't change the modes of ARG0 and ARG1
1917 and see if the inner values are the same. This removes any
1918 signedness comparison, which doesn't matter here. */
1919 primarg0 = arg0, primarg1 = arg1;
1920 STRIP_NOPS (primarg0);
1921 STRIP_NOPS (primarg1);
1922 if (operand_equal_p (primarg0, primarg1, 0))
1923 return 1;
1924
1925 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
1926 actual comparison operand, ARG0.
1927
1928 First throw away any conversions to wider types
1929 already present in the operands. */
1930
1931 primarg1 = get_narrower (arg1, &unsignedp1);
1932 primother = get_narrower (other, &unsignedpo);
1933
1934 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
1935 if (unsignedp1 == unsignedpo
1936 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
1937 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
1938 {
1939 tree type = TREE_TYPE (arg0);
1940
1941 /* Make sure shorter operand is extended the right way
1942 to match the longer operand. */
1943 primarg1 = convert ((*lang_hooks.types.signed_or_unsigned_type)
1944 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
1945
1946 if (operand_equal_p (arg0, convert (type, primarg1), 0))
1947 return 1;
1948 }
1949
1950 return 0;
1951 }
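
/* An illustrative (hypothetical) case, assuming the usual C integer
   promotions: for `signed char' C and D, shorten_compare may rewrite
   the comparison `(int) c < (int) d' as `c < d'.  Here ARG0 is `c',
   ARG1 is `(int) c' and OTHER is `(int) d'; stripping the widening
   conversions from ARG1 and OTHER and re-extending ARG1 the way
   shorten_compare would shows that ARG0 could indeed have come from
   ARG1.  */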
1952 \f
1953 /* See if ARG is an expression that is either a comparison or is performing
1954 arithmetic on comparisons. The comparisons must only be comparing
1955 two different values, which will be stored in *CVAL1 and *CVAL2; if
1956 they are non-zero it means that some operands have already been found.
1957 No variables may be used anywhere else in the expression except in the
1958 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
1959 the expression and save_expr needs to be called with CVAL1 and CVAL2.
1960
1961 If this is true, return 1. Otherwise, return zero. */
1962
1963 static int
1964 twoval_comparison_p (arg, cval1, cval2, save_p)
1965 tree arg;
1966 tree *cval1, *cval2;
1967 int *save_p;
1968 {
1969 enum tree_code code = TREE_CODE (arg);
1970 char class = TREE_CODE_CLASS (code);
1971
1972 /* We can handle some of the 'e' cases here. */
1973 if (class == 'e' && code == TRUTH_NOT_EXPR)
1974 class = '1';
1975 else if (class == 'e'
1976 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
1977 || code == COMPOUND_EXPR))
1978 class = '2';
1979
1980 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
1981 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
1982 {
1983 /* If we've already found a CVAL1 or CVAL2, this expression is
1984 too complex to handle. */
1985 if (*cval1 || *cval2)
1986 return 0;
1987
1988 class = '1';
1989 *save_p = 1;
1990 }
1991
1992 switch (class)
1993 {
1994 case '1':
1995 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
1996
1997 case '2':
1998 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
1999 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2000 cval1, cval2, save_p));
2001
2002 case 'c':
2003 return 1;
2004
2005 case 'e':
2006 if (code == COND_EXPR)
2007 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2008 cval1, cval2, save_p)
2009 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2010 cval1, cval2, save_p)
2011 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2012 cval1, cval2, save_p));
2013 return 0;
2014
2015 case '<':
2016 /* First see if we can handle the first operand, then the second. For
2017 the second operand, we know *CVAL1 can't be zero. It must be that
2018 one side of the comparison is each of the values; test for the
2019 case where this isn't true by failing if the two operands
2020 are the same. */
2021
2022 if (operand_equal_p (TREE_OPERAND (arg, 0),
2023 TREE_OPERAND (arg, 1), 0))
2024 return 0;
2025
2026 if (*cval1 == 0)
2027 *cval1 = TREE_OPERAND (arg, 0);
2028 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2029 ;
2030 else if (*cval2 == 0)
2031 *cval2 = TREE_OPERAND (arg, 0);
2032 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2033 ;
2034 else
2035 return 0;
2036
2037 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2038 ;
2039 else if (*cval2 == 0)
2040 *cval2 = TREE_OPERAND (arg, 1);
2041 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2042 ;
2043 else
2044 return 0;
2045
2046 return 1;
2047
2048 default:
2049 return 0;
2050 }
2051 }
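
/* For example, for the expression `(a < b) | (a == b)' the first
   comparison stores `a' in *CVAL1 and `b' in *CVAL2, the second
   comparison re-uses both values, and the function returns 1.  For
   `(a < b) | (c == d)' it returns 0, since more than two distinct
   values appear inside the comparisons.  */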
2052 \f
2053 /* ARG is a tree that is known to contain just arithmetic operations and
2054 comparisons. Evaluate the operations in the tree substituting NEW0 for
2055 any occurrence of OLD0 as an operand of a comparison and likewise for
2056 NEW1 and OLD1. */
2057
2058 static tree
2059 eval_subst (arg, old0, new0, old1, new1)
2060 tree arg;
2061 tree old0, new0, old1, new1;
2062 {
2063 tree type = TREE_TYPE (arg);
2064 enum tree_code code = TREE_CODE (arg);
2065 char class = TREE_CODE_CLASS (code);
2066
2067 /* We can handle some of the 'e' cases here. */
2068 if (class == 'e' && code == TRUTH_NOT_EXPR)
2069 class = '1';
2070 else if (class == 'e'
2071 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2072 class = '2';
2073
2074 switch (class)
2075 {
2076 case '1':
2077 return fold (build1 (code, type,
2078 eval_subst (TREE_OPERAND (arg, 0),
2079 old0, new0, old1, new1)));
2080
2081 case '2':
2082 return fold (build (code, type,
2083 eval_subst (TREE_OPERAND (arg, 0),
2084 old0, new0, old1, new1),
2085 eval_subst (TREE_OPERAND (arg, 1),
2086 old0, new0, old1, new1)));
2087
2088 case 'e':
2089 switch (code)
2090 {
2091 case SAVE_EXPR:
2092 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2093
2094 case COMPOUND_EXPR:
2095 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2096
2097 case COND_EXPR:
2098 return fold (build (code, type,
2099 eval_subst (TREE_OPERAND (arg, 0),
2100 old0, new0, old1, new1),
2101 eval_subst (TREE_OPERAND (arg, 1),
2102 old0, new0, old1, new1),
2103 eval_subst (TREE_OPERAND (arg, 2),
2104 old0, new0, old1, new1)));
2105 default:
2106 break;
2107 }
2108 /* fall through - ??? */
2109
2110 case '<':
2111 {
2112 tree arg0 = TREE_OPERAND (arg, 0);
2113 tree arg1 = TREE_OPERAND (arg, 1);
2114
2115 /* We need to check both for exact equality and tree equality. The
2116 former will be true if the operand has a side-effect. In that
2117 case, we know the operand occurred exactly once. */
2118
2119 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2120 arg0 = new0;
2121 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2122 arg0 = new1;
2123
2124 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2125 arg1 = new0;
2126 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2127 arg1 = new1;
2128
2129 return fold (build (code, type, arg0, arg1));
2130 }
2131
2132 default:
2133 return arg;
2134 }
2135 }
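
/* Continuing the example above: with OLD0 = `a', NEW0 = `0', OLD1 =
   `b' and NEW1 = `1', eval_subst rewrites `(a < b) | (a == b)' into
   `(0 < 1) | (0 == 1)', which fold can then reduce to a constant.  */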
2136 \f
2137 /* Return a tree for the case when the result of an expression is RESULT
2138 converted to TYPE and OMITTED was previously an operand of the expression
2139 but is now not needed (e.g., we folded OMITTED * 0).
2140
2141 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2142 the conversion of RESULT to TYPE. */
2143
2144 static tree
2145 omit_one_operand (type, result, omitted)
2146 tree type, result, omitted;
2147 {
2148 tree t = convert (type, result);
2149
2150 if (TREE_SIDE_EFFECTS (omitted))
2151 return build (COMPOUND_EXPR, type, omitted, t);
2152
2153 return non_lvalue (t);
2154 }
2155
2156 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2157
2158 static tree
2159 pedantic_omit_one_operand (type, result, omitted)
2160 tree type, result, omitted;
2161 {
2162 tree t = convert (type, result);
2163
2164 if (TREE_SIDE_EFFECTS (omitted))
2165 return build (COMPOUND_EXPR, type, omitted, t);
2166
2167 return pedantic_non_lvalue (t);
2168 }
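
/* For instance, when folding `f () * 0' the result is 0 but the call
   must still be evaluated, so we produce the COMPOUND_EXPR `(f (), 0)';
   for a side-effect-free operand, as in `x * 0', we simply return the
   converted result through non_lvalue.  */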
2169 \f
2170 /* Return a simplified tree node for the truth-negation of ARG. This
2171 never alters ARG itself. We assume that ARG is an operation that
2172 returns a truth value (0 or 1). */
2173
2174 tree
2175 invert_truthvalue (arg)
2176 tree arg;
2177 {
2178 tree type = TREE_TYPE (arg);
2179 enum tree_code code = TREE_CODE (arg);
2180
2181 if (code == ERROR_MARK)
2182 return arg;
2183
2184 /* If this is a comparison, we can simply invert it, except for
2185 floating-point non-equality comparisons, in which case we just
2186 enclose a TRUTH_NOT_EXPR around what we have. */
2187
2188 if (TREE_CODE_CLASS (code) == '<')
2189 {
2190 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2191 && !flag_unsafe_math_optimizations
2192 && code != NE_EXPR
2193 && code != EQ_EXPR)
2194 return build1 (TRUTH_NOT_EXPR, type, arg);
2195 else
2196 return build (invert_tree_comparison (code), type,
2197 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2198 }
2199
2200 switch (code)
2201 {
2202 case INTEGER_CST:
2203 return convert (type, build_int_2 (integer_zerop (arg), 0));
2204
2205 case TRUTH_AND_EXPR:
2206 return build (TRUTH_OR_EXPR, type,
2207 invert_truthvalue (TREE_OPERAND (arg, 0)),
2208 invert_truthvalue (TREE_OPERAND (arg, 1)));
2209
2210 case TRUTH_OR_EXPR:
2211 return build (TRUTH_AND_EXPR, type,
2212 invert_truthvalue (TREE_OPERAND (arg, 0)),
2213 invert_truthvalue (TREE_OPERAND (arg, 1)));
2214
2215 case TRUTH_XOR_EXPR:
2216 /* Here we can invert either operand. We invert the first operand
2217 unless the second operand is a TRUTH_NOT_EXPR in which case our
2218 result is the XOR of the first operand with the inside of the
2219 negation of the second operand. */
2220
2221 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2222 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2223 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2224 else
2225 return build (TRUTH_XOR_EXPR, type,
2226 invert_truthvalue (TREE_OPERAND (arg, 0)),
2227 TREE_OPERAND (arg, 1));
2228
2229 case TRUTH_ANDIF_EXPR:
2230 return build (TRUTH_ORIF_EXPR, type,
2231 invert_truthvalue (TREE_OPERAND (arg, 0)),
2232 invert_truthvalue (TREE_OPERAND (arg, 1)));
2233
2234 case TRUTH_ORIF_EXPR:
2235 return build (TRUTH_ANDIF_EXPR, type,
2236 invert_truthvalue (TREE_OPERAND (arg, 0)),
2237 invert_truthvalue (TREE_OPERAND (arg, 1)));
2238
2239 case TRUTH_NOT_EXPR:
2240 return TREE_OPERAND (arg, 0);
2241
2242 case COND_EXPR:
2243 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2244 invert_truthvalue (TREE_OPERAND (arg, 1)),
2245 invert_truthvalue (TREE_OPERAND (arg, 2)));
2246
2247 case COMPOUND_EXPR:
2248 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2249 invert_truthvalue (TREE_OPERAND (arg, 1)));
2250
2251 case WITH_RECORD_EXPR:
2252 return build (WITH_RECORD_EXPR, type,
2253 invert_truthvalue (TREE_OPERAND (arg, 0)),
2254 TREE_OPERAND (arg, 1));
2255
2256 case NON_LVALUE_EXPR:
2257 return invert_truthvalue (TREE_OPERAND (arg, 0));
2258
2259 case NOP_EXPR:
2260 case CONVERT_EXPR:
2261 case FLOAT_EXPR:
2262 return build1 (TREE_CODE (arg), type,
2263 invert_truthvalue (TREE_OPERAND (arg, 0)));
2264
2265 case BIT_AND_EXPR:
2266 if (!integer_onep (TREE_OPERAND (arg, 1)))
2267 break;
2268 return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));
2269
2270 case SAVE_EXPR:
2271 return build1 (TRUTH_NOT_EXPR, type, arg);
2272
2273 case CLEANUP_POINT_EXPR:
2274 return build1 (CLEANUP_POINT_EXPR, type,
2275 invert_truthvalue (TREE_OPERAND (arg, 0)));
2276
2277 default:
2278 break;
2279 }
2280 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2281 abort ();
2282 return build1 (TRUTH_NOT_EXPR, type, arg);
2283 }
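
/* As an example of the recursion above, inverting `a < b && c != 0'
   yields `a >= b || c == 0': the TRUTH_ANDIF_EXPR case swaps the
   connective and each comparison is inverted by invert_tree_comparison.
   (If A and B are floating-point and -funsafe-math-optimizations is
   not given, `a < b' is instead wrapped in a TRUTH_NOT_EXPR, since
   with NaNs `a >= b' is not its inverse.)  */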
2284
2285 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2286 operands are another bit-wise operation with a common input. If so,
2287 distribute the bit operations to save an operation and possibly two if
2288 constants are involved. For example, convert
2289 (A | B) & (A | C) into A | (B & C)
2290 Further simplification will occur if B and C are constants.
2291
2292 If this optimization cannot be done, 0 will be returned. */
2293
2294 static tree
2295 distribute_bit_expr (code, type, arg0, arg1)
2296 enum tree_code code;
2297 tree type;
2298 tree arg0, arg1;
2299 {
2300 tree common;
2301 tree left, right;
2302
2303 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2304 || TREE_CODE (arg0) == code
2305 || (TREE_CODE (arg0) != BIT_AND_EXPR
2306 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2307 return 0;
2308
2309 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2310 {
2311 common = TREE_OPERAND (arg0, 0);
2312 left = TREE_OPERAND (arg0, 1);
2313 right = TREE_OPERAND (arg1, 1);
2314 }
2315 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2316 {
2317 common = TREE_OPERAND (arg0, 0);
2318 left = TREE_OPERAND (arg0, 1);
2319 right = TREE_OPERAND (arg1, 0);
2320 }
2321 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2322 {
2323 common = TREE_OPERAND (arg0, 1);
2324 left = TREE_OPERAND (arg0, 0);
2325 right = TREE_OPERAND (arg1, 1);
2326 }
2327 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2328 {
2329 common = TREE_OPERAND (arg0, 1);
2330 left = TREE_OPERAND (arg0, 0);
2331 right = TREE_OPERAND (arg1, 0);
2332 }
2333 else
2334 return 0;
2335
2336 return fold (build (TREE_CODE (arg0), type, common,
2337 fold (build (code, type, left, right))));
2338 }
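
/* For example, `(x | 3) & (x | 5)' is rewritten as `x | (3 & 5)';
   folding the inner constant operation then gives `x | 1', saving two
   operations.  */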
2339 \f
2340 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2341 starting at BITPOS. The field is unsigned if UNSIGNEDP is non-zero. */
2342
2343 static tree
2344 make_bit_field_ref (inner, type, bitsize, bitpos, unsignedp)
2345 tree inner;
2346 tree type;
2347 int bitsize, bitpos;
2348 int unsignedp;
2349 {
2350 tree result = build (BIT_FIELD_REF, type, inner,
2351 size_int (bitsize), bitsize_int (bitpos));
2352
2353 TREE_UNSIGNED (result) = unsignedp;
2354
2355 return result;
2356 }
2357
2358 /* Optimize a bit-field compare.
2359
2360 There are two cases: First is a compare against a constant and the
2361 second is a comparison of two items where the fields are at the same
2362 bit position relative to the start of a chunk (byte, halfword, word)
2363 large enough to contain it. In these cases we can avoid the shift
2364 implicit in bitfield extractions.
2365
2366 For constants, we emit a compare of the shifted constant with the
2367 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2368 compared. For two fields at the same position, we do the ANDs with the
2369 similar mask and compare the result of the ANDs.
2370
2371 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2372 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2373 are the left and right operands of the comparison, respectively.
2374
2375 If the optimization described above can be done, we return the resulting
2376 tree. Otherwise we return zero. */
2377
2378 static tree
2379 optimize_bit_field_compare (code, compare_type, lhs, rhs)
2380 enum tree_code code;
2381 tree compare_type;
2382 tree lhs, rhs;
2383 {
2384 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
2385 tree type = TREE_TYPE (lhs);
2386 tree signed_type, unsigned_type;
2387 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2388 enum machine_mode lmode, rmode, nmode;
2389 int lunsignedp, runsignedp;
2390 int lvolatilep = 0, rvolatilep = 0;
2391 tree linner, rinner = NULL_TREE;
2392 tree mask;
2393 tree offset;
2394
2395 /* Get all the information about the extractions being done. If the bit size
2396 is the same as the size of the underlying object, we aren't doing an
2397 extraction at all and so can do nothing. We also don't want to
2398 do anything if the inner expression is a PLACEHOLDER_EXPR since we
2399 then will no longer be able to replace it. */
2400 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2401 &lunsignedp, &lvolatilep);
2402 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2403 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
2404 return 0;
2405
2406 if (!const_p)
2407 {
2408 /* If this is not a constant, we can only do something if bit positions,
2409 sizes, and signedness are the same. */
2410 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
2411 &runsignedp, &rvolatilep);
2412
2413 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2414 || lunsignedp != runsignedp || offset != 0
2415 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
2416 return 0;
2417 }
2418
2419 /* See if we can find a mode to refer to this field. We should be able to,
2420 but fail if we can't. */
2421 nmode = get_best_mode (lbitsize, lbitpos,
2422 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
2423 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
2424 TYPE_ALIGN (TREE_TYPE (rinner))),
2425 word_mode, lvolatilep || rvolatilep);
2426 if (nmode == VOIDmode)
2427 return 0;
2428
2429 /* Set signed and unsigned types of the precision of this mode for the
2430 shifts below. */
2431 signed_type = (*lang_hooks.types.type_for_mode) (nmode, 0);
2432 unsigned_type = (*lang_hooks.types.type_for_mode) (nmode, 1);
2433
2434 /* Compute the bit position and size for the new reference and our offset
2435 within it. If the new reference is the same size as the original, we
2436 won't optimize anything, so return zero. */
2437 nbitsize = GET_MODE_BITSIZE (nmode);
2438 nbitpos = lbitpos & ~ (nbitsize - 1);
2439 lbitpos -= nbitpos;
2440 if (nbitsize == lbitsize)
2441 return 0;
2442
2443 if (BYTES_BIG_ENDIAN)
2444 lbitpos = nbitsize - lbitsize - lbitpos;
2445
2446 /* Make the mask to be used against the extracted field. */
2447 mask = build_int_2 (~0, ~0);
2448 TREE_TYPE (mask) = unsigned_type;
2449 force_fit_type (mask, 0);
2450 mask = convert (unsigned_type, mask);
2451 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
2452 mask = const_binop (RSHIFT_EXPR, mask,
2453 size_int (nbitsize - lbitsize - lbitpos), 0);
2454
2455 if (! const_p)
2456 /* If not comparing with constant, just rework the comparison
2457 and return. */
2458 return build (code, compare_type,
2459 build (BIT_AND_EXPR, unsigned_type,
2460 make_bit_field_ref (linner, unsigned_type,
2461 nbitsize, nbitpos, 1),
2462 mask),
2463 build (BIT_AND_EXPR, unsigned_type,
2464 make_bit_field_ref (rinner, unsigned_type,
2465 nbitsize, nbitpos, 1),
2466 mask));
2467
2468 /* Otherwise, we are handling the constant case. See if the constant is too
2469 big for the field. Warn and return a tree for 0 (false) if so. We do
2470 this not only for its own sake, but to avoid having to test for this
2471 error case below. If we didn't, we might generate wrong code.
2472
2473 For unsigned fields, the constant shifted right by the field length should
2474 be all zero. For signed fields, the high-order bits should agree with
2475 the sign bit. */
2476
2477 if (lunsignedp)
2478 {
2479 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2480 convert (unsigned_type, rhs),
2481 size_int (lbitsize), 0)))
2482 {
2483 warning ("comparison is always %d due to width of bit-field",
2484 code == NE_EXPR);
2485 return convert (compare_type,
2486 (code == NE_EXPR
2487 ? integer_one_node : integer_zero_node));
2488 }
2489 }
2490 else
2491 {
2492 tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
2493 size_int (lbitsize - 1), 0);
2494 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2495 {
2496 warning ("comparison is always %d due to width of bit-field",
2497 code == NE_EXPR);
2498 return convert (compare_type,
2499 (code == NE_EXPR
2500 ? integer_one_node : integer_zero_node));
2501 }
2502 }
2503
2504 /* Single-bit compares should always be against zero. */
2505 if (lbitsize == 1 && ! integer_zerop (rhs))
2506 {
2507 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2508 rhs = convert (type, integer_zero_node);
2509 }
2510
2511 /* Make a new bitfield reference, shift the constant over the
2512 appropriate number of bits and mask it with the computed mask
2513 (in case this was a signed field). If we changed it, make a new one. */
2514 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
2515 if (lvolatilep)
2516 {
2517 TREE_SIDE_EFFECTS (lhs) = 1;
2518 TREE_THIS_VOLATILE (lhs) = 1;
2519 }
2520
2521 rhs = fold (const_binop (BIT_AND_EXPR,
2522 const_binop (LSHIFT_EXPR,
2523 convert (unsigned_type, rhs),
2524 size_int (lbitpos), 0),
2525 mask, 0));
2526
2527 return build (code, compare_type,
2528 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
2529 rhs);
2530 }
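
/* A sketch of the constant case above (the exact mask and shift counts
   depend on the target's endianness and on the mode chosen by
   get_best_mode): given `struct { unsigned a : 3, b : 5; } s', the
   test `s.b == 7' can be compiled as loading the byte that contains B,
   ANDing it with the mask that covers B's bits, and comparing the
   result against 7 shifted into B's position -- avoiding the shift
   that a plain bit-field extraction would need.  */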
2531 \f
2532 /* Subroutine for fold_truthop: decode a field reference.
2533
2534 If EXP is a comparison reference, we return the innermost reference.
2535
2536 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2537 set to the starting bit number.
2538
2539 If the innermost field can be completely contained in a mode-sized
2540 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
2541
2542 *PVOLATILEP is set to 1 if any expression encountered is volatile;
2543 otherwise it is not changed.
2544
2545 *PUNSIGNEDP is set to the signedness of the field.
2546
2547 *PMASK is set to the mask used. This is either contained in a
2548 BIT_AND_EXPR or derived from the width of the field.
2549
2550 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
2551
2552 Return 0 if this is not a component reference or is one that we can't
2553 do anything with. */
2554
2555 static tree
2556 decode_field_reference (exp, pbitsize, pbitpos, pmode, punsignedp,
2557 pvolatilep, pmask, pand_mask)
2558 tree exp;
2559 HOST_WIDE_INT *pbitsize, *pbitpos;
2560 enum machine_mode *pmode;
2561 int *punsignedp, *pvolatilep;
2562 tree *pmask;
2563 tree *pand_mask;
2564 {
2565 tree and_mask = 0;
2566 tree mask, inner, offset;
2567 tree unsigned_type;
2568 unsigned int precision;
2569
2570 /* All the optimizations using this function assume integer fields.
2571 There are problems with FP fields since the type_for_size call
2572 below can fail for, e.g., XFmode. */
2573 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
2574 return 0;
2575
2576 STRIP_NOPS (exp);
2577
2578 if (TREE_CODE (exp) == BIT_AND_EXPR)
2579 {
2580 and_mask = TREE_OPERAND (exp, 1);
2581 exp = TREE_OPERAND (exp, 0);
2582 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
2583 if (TREE_CODE (and_mask) != INTEGER_CST)
2584 return 0;
2585 }
2586
2587 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
2588 punsignedp, pvolatilep);
2589 if ((inner == exp && and_mask == 0)
2590 || *pbitsize < 0 || offset != 0
2591 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
2592 return 0;
2593
2594 /* Compute the mask to access the bitfield. */
2595 unsigned_type = (*lang_hooks.types.type_for_size) (*pbitsize, 1);
2596 precision = TYPE_PRECISION (unsigned_type);
2597
2598 mask = build_int_2 (~0, ~0);
2599 TREE_TYPE (mask) = unsigned_type;
2600 force_fit_type (mask, 0);
2601 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2602 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2603
2604 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
2605 if (and_mask != 0)
2606 mask = fold (build (BIT_AND_EXPR, unsigned_type,
2607 convert (unsigned_type, and_mask), mask));
2608
2609 *pmask = mask;
2610 *pand_mask = and_mask;
2611 return inner;
2612 }
2613
2614 /* Return non-zero if MASK represents a mask of SIZE ones in the low-order
2615 bit positions. */
2616
2617 static int
2618 all_ones_mask_p (mask, size)
2619 tree mask;
2620 int size;
2621 {
2622 tree type = TREE_TYPE (mask);
2623 unsigned int precision = TYPE_PRECISION (type);
2624 tree tmask;
2625
2626 tmask = build_int_2 (~0, ~0);
2627 TREE_TYPE (tmask) = (*lang_hooks.types.signed_type) (type);
2628 force_fit_type (tmask, 0);
2629 return
2630 tree_int_cst_equal (mask,
2631 const_binop (RSHIFT_EXPR,
2632 const_binop (LSHIFT_EXPR, tmask,
2633 size_int (precision - size),
2634 0),
2635 size_int (precision - size), 0));
2636 }
2637
2638 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
2639 represents the sign bit of EXP's type. If EXP represents a sign
2640 or zero extension, also test VAL against the unextended type.
2641 The return value is the (sub)expression whose sign bit is VAL,
2642 or NULL_TREE otherwise. */
2643
2644 static tree
2645 sign_bit_p (exp, val)
2646 tree exp;
2647 tree val;
2648 {
2649 unsigned HOST_WIDE_INT lo;
2650 HOST_WIDE_INT hi;
2651 int width;
2652 tree t;
2653
2654 /* Tree EXP must have an integral type. */
2655 t = TREE_TYPE (exp);
2656 if (! INTEGRAL_TYPE_P (t))
2657 return NULL_TREE;
2658
2659 /* Tree VAL must be an integer constant. */
2660 if (TREE_CODE (val) != INTEGER_CST
2661 || TREE_CONSTANT_OVERFLOW (val))
2662 return NULL_TREE;
2663
2664 width = TYPE_PRECISION (t);
2665 if (width > HOST_BITS_PER_WIDE_INT)
2666 {
2667 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
2668 lo = 0;
2669 }
2670 else
2671 {
2672 hi = 0;
2673 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
2674 }
2675
2676 if (TREE_INT_CST_HIGH (val) == hi && TREE_INT_CST_LOW (val) == lo)
2677 return exp;
2678
2679 /* Handle extension from a narrower type. */
2680 if (TREE_CODE (exp) == NOP_EXPR
2681 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
2682 return sign_bit_p (TREE_OPERAND (exp, 0), val);
2683
2684 return NULL_TREE;
2685 }
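
/* For example, for a 32-bit EXP the value tested for is 1 << 31; and
   if EXP is `(int) c' with C of a narrower signed type, VAL is also
   tested against the sign bit of C's type via the recursive call
   above.  */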
2686
2687 /* Subroutine for fold_truthop: determine if an operand is simple enough
2688 to be evaluated unconditionally. */
2689
2690 static int
2691 simple_operand_p (exp)
2692 tree exp;
2693 {
2694 /* Strip any conversions that don't change the machine mode. */
2695 while ((TREE_CODE (exp) == NOP_EXPR
2696 || TREE_CODE (exp) == CONVERT_EXPR)
2697 && (TYPE_MODE (TREE_TYPE (exp))
2698 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
2699 exp = TREE_OPERAND (exp, 0);
2700
2701 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
2702 || (DECL_P (exp)
2703 && ! TREE_ADDRESSABLE (exp)
2704 && ! TREE_THIS_VOLATILE (exp)
2705 && ! DECL_NONLOCAL (exp)
2706 /* Don't regard global variables as simple. They may be
2707 allocated in ways unknown to the compiler (shared memory,
2708 #pragma weak, etc). */
2709 && ! TREE_PUBLIC (exp)
2710 && ! DECL_EXTERNAL (exp)
2711 /* Loading a static variable is unduly expensive, but global
2712 registers aren't expensive. */
2713 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
2714 }
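
/* Thus a constant, or a local non-volatile, non-static variable,
   counts as simple, while globals, volatile objects and anything
   needing a memory dereference do not.  */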
2715 \f
2716 /* The following functions are subroutines to fold_range_test and allow it to
2717 try to change a logical combination of comparisons into a range test.
2718
2719 For example, both
2720 X == 2 || X == 3 || X == 4 || X == 5
2721 and
2722 X >= 2 && X <= 5
2723 are converted to
2724 (unsigned) (X - 2) <= 3
2725
2726 We describe each set of comparisons as being either inside or outside
2727 a range, using a variable named like IN_P, and then describe the
2728 range with a lower and upper bound. If one of the bounds is omitted,
2729 it represents either the highest or lowest value of the type.
2730
2731 In the comments below, we represent a range by two numbers in brackets
2732 preceded by a "+" to designate being inside that range, or a "-" to
2733 designate being outside that range, so the condition can be inverted by
2734 flipping the prefix. An omitted bound is represented by a "-". For
2735 example, "- [-, 10]" means being outside the range starting at the lowest
2736 possible value and ending at 10, in other words, being greater than 10.
2737 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
2738 always false.
2739
2740 We set up things so that the missing bounds are handled in a consistent
2741 manner so neither a missing bound nor "true" and "false" need to be
2742 handled using a special case. */
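
/* For instance, `X >= 2 && X <= 5' is described by IN_P == 1 with
   bounds 2 and 5, written "+ [2, 5]", while `X > 10' is described by
   IN_P == 0 with a missing lower bound, written "- [-, 10]".  */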
2743
2744 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
2745 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
2746 and UPPER1_P are nonzero if the respective argument is an upper bound
2747 and zero for a lower. TYPE, if nonzero, is the type of the result; it
2748 must be specified for a comparison. ARG1 will be converted to ARG0's
2749 type if both are specified. */
2750
2751 static tree
2752 range_binop (code, type, arg0, upper0_p, arg1, upper1_p)
2753 enum tree_code code;
2754 tree type;
2755 tree arg0, arg1;
2756 int upper0_p, upper1_p;
2757 {
2758 tree tem;
2759 int result;
2760 int sgn0, sgn1;
2761
2762 /* If neither arg represents infinity, do the normal operation.
2763 Else, if not a comparison, return infinity. Else handle the special
2764 comparison rules. Note that most of the cases below won't occur, but
2765 are handled for consistency. */
2766
2767 if (arg0 != 0 && arg1 != 0)
2768 {
2769 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
2770 arg0, convert (TREE_TYPE (arg0), arg1)));
2771 STRIP_NOPS (tem);
2772 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
2773 }
2774
2775 if (TREE_CODE_CLASS (code) != '<')
2776 return 0;
2777
2778 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
2779 for neither. In real maths, we cannot assume open ended ranges are
2780 the same. But, this is computer arithmetic, where numbers are finite.
2781 We can therefore represent any unbounded range by the value Z,
2782 Z being greater than any representable number. This permits
2783 us to treat unbounded ranges as equal. */
2784 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
2785 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
2786 switch (code)
2787 {
2788 case EQ_EXPR:
2789 result = sgn0 == sgn1;
2790 break;
2791 case NE_EXPR:
2792 result = sgn0 != sgn1;
2793 break;
2794 case LT_EXPR:
2795 result = sgn0 < sgn1;
2796 break;
2797 case LE_EXPR:
2798 result = sgn0 <= sgn1;
2799 break;
2800 case GT_EXPR:
2801 result = sgn0 > sgn1;
2802 break;
2803 case GE_EXPR:
2804 result = sgn0 >= sgn1;
2805 break;
2806 default:
2807 abort ();
2808 }
2809
2810 return convert (type, result ? integer_one_node : integer_zero_node);
2811 }
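
/* For example, range_binop (LE_EXPR, type, 0, 0, b, 1), which compares
   a missing lower bound against a finite upper bound B, yields true:
   SGN0 is -1 (minus infinity) and SGN1 is 0.  */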
2812 \f
2813 /* Given EXP, a logical expression, set the range it is testing into
2814 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
2815 actually being tested. *PLOW and *PHIGH will be made of the same type
2816 as the returned expression. If EXP is not a comparison, we will most
2817 likely not be returning a useful value and range. */
2818
2819 static tree
2820 make_range (exp, pin_p, plow, phigh)
2821 tree exp;
2822 int *pin_p;
2823 tree *plow, *phigh;
2824 {
2825 enum tree_code code;
2826 tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
2827 tree orig_type = NULL_TREE;
2828 int in_p, n_in_p;
2829 tree low, high, n_low, n_high;
2830
2831 /* Start with simply saying "EXP != 0" and then look at the code of EXP
2832 and see if we can refine the range. Some of the cases below may not
2833 happen, but it doesn't seem worth worrying about this. We "continue"
2834 the outer loop when we've changed something; otherwise we "break"
2835 the switch, which will "break" the while. */
2836
2837 in_p = 0, low = high = convert (TREE_TYPE (exp), integer_zero_node);
2838
2839 while (1)
2840 {
2841 code = TREE_CODE (exp);
2842
2843 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
2844 {
2845 arg0 = TREE_OPERAND (exp, 0);
2846 if (TREE_CODE_CLASS (code) == '<'
2847 || TREE_CODE_CLASS (code) == '1'
2848 || TREE_CODE_CLASS (code) == '2')
2849 type = TREE_TYPE (arg0);
2850 if (TREE_CODE_CLASS (code) == '2'
2851 || TREE_CODE_CLASS (code) == '<'
2852 || (TREE_CODE_CLASS (code) == 'e'
2853 && TREE_CODE_LENGTH (code) > 1))
2854 arg1 = TREE_OPERAND (exp, 1);
2855 }
2856
2857 /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
2858 lose a cast by accident. */
2859 if (type != NULL_TREE && orig_type == NULL_TREE)
2860 orig_type = type;
2861
2862 switch (code)
2863 {
2864 case TRUTH_NOT_EXPR:
2865 in_p = ! in_p, exp = arg0;
2866 continue;
2867
2868 case EQ_EXPR: case NE_EXPR:
2869 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
2870 /* We can only do something if the range is testing for zero
2871 and if the second operand is an integer constant. Note that
2872 saying something is "in" the range we make is done by
2873 complementing IN_P, since the range is set up in the initial
2874 case as "not equal to zero"; "out" leaves it alone. */
2875 if (low == 0 || high == 0
2876 || ! integer_zerop (low) || ! integer_zerop (high)
2877 || TREE_CODE (arg1) != INTEGER_CST)
2878 break;
2879
2880 switch (code)
2881 {
2882 case NE_EXPR: /* - [c, c] */
2883 low = high = arg1;
2884 break;
2885 case EQ_EXPR: /* + [c, c] */
2886 in_p = ! in_p, low = high = arg1;
2887 break;
2888 case GT_EXPR: /* - [-, c] */
2889 low = 0, high = arg1;
2890 break;
2891 case GE_EXPR: /* + [c, -] */
2892 in_p = ! in_p, low = arg1, high = 0;
2893 break;
2894 case LT_EXPR: /* - [c, -] */
2895 low = arg1, high = 0;
2896 break;
2897 case LE_EXPR: /* + [-, c] */
2898 in_p = ! in_p, low = 0, high = arg1;
2899 break;
2900 default:
2901 abort ();
2902 }
2903
2904 exp = arg0;
2905
2906 /* If this is an unsigned comparison, we also know that EXP is
2907 greater than or equal to zero. We base the range tests we make
2908 on that fact, so we record it here so we can parse existing
2909 range tests. */
2910 if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
2911 {
2912 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
2913 1, convert (type, integer_zero_node),
2914 NULL_TREE))
2915 break;
2916
2917 in_p = n_in_p, low = n_low, high = n_high;
2918
2919 /* If the high bound is missing, but we
2920 have a low bound, reverse the range so
2921 it goes from zero to the low bound minus 1. */
2922 if (high == 0 && low)
2923 {
2924 in_p = ! in_p;
2925 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
2926 integer_one_node, 0);
2927 low = convert (type, integer_zero_node);
2928 }
2929 }
2930 continue;
2931
2932 case NEGATE_EXPR:
2933 /* (-x) IN [a,b] -> x in [-b, -a] */
2934 n_low = range_binop (MINUS_EXPR, type,
2935 convert (type, integer_zero_node), 0, high, 1);
2936 n_high = range_binop (MINUS_EXPR, type,
2937 convert (type, integer_zero_node), 0, low, 0);
2938 low = n_low, high = n_high;
2939 exp = arg0;
2940 continue;
2941
2942 case BIT_NOT_EXPR:
2943 /* ~ X -> -X - 1 */
2944 exp = build (MINUS_EXPR, type, negate_expr (arg0),
2945 convert (type, integer_one_node));
2946 continue;
2947
2948 case PLUS_EXPR: case MINUS_EXPR:
2949 if (TREE_CODE (arg1) != INTEGER_CST)
2950 break;
2951
2952 /* If EXP is signed, any overflow in the computation is undefined,
2953 so we don't worry about it so long as our computations on
2954 the bounds don't overflow. For unsigned, overflow is defined
2955 and this is exactly the right thing. */
2956 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
2957 type, low, 0, arg1, 0);
2958 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
2959 type, high, 1, arg1, 0);
2960 if ((n_low != 0 && TREE_OVERFLOW (n_low))
2961 || (n_high != 0 && TREE_OVERFLOW (n_high)))
2962 break;
2963
2964 /* Check for an unsigned range which has wrapped around the maximum
2965 value thus making n_high < n_low, and normalize it. */
2966 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
2967 {
2968 low = range_binop (PLUS_EXPR, type, n_high, 0,
2969 integer_one_node, 0);
2970 high = range_binop (MINUS_EXPR, type, n_low, 0,
2971 integer_one_node, 0);
2972
2973 /* If the range is of the form +/- [ x+1, x ], we won't
2974 be able to normalize it. But then, it represents the
2975 whole range or the empty set, so make it
2976 +/- [ -, - ]. */
2977 if (tree_int_cst_equal (n_low, low)
2978 && tree_int_cst_equal (n_high, high))
2979 low = high = 0;
2980 else
2981 in_p = ! in_p;
2982 }
2983 else
2984 low = n_low, high = n_high;
2985
2986 exp = arg0;
2987 continue;
2988
2989 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
2990 if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
2991 break;
2992
2993 if (! INTEGRAL_TYPE_P (type)
2994 || (low != 0 && ! int_fits_type_p (low, type))
2995 || (high != 0 && ! int_fits_type_p (high, type)))
2996 break;
2997
2998 n_low = low, n_high = high;
2999
3000 if (n_low != 0)
3001 n_low = convert (type, n_low);
3002
3003 if (n_high != 0)
3004 n_high = convert (type, n_high);
3005
3006 /* If we're converting from an unsigned to a signed type,
3007 we will be doing the comparison as unsigned. The tests above
3008 have already verified that LOW and HIGH are both positive.
3009
3010 So we have to make sure that the original unsigned value will
3011 be interpreted as positive. */
3012 if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
3013 {
3014 tree equiv_type = (*lang_hooks.types.type_for_mode)
3015 (TYPE_MODE (type), 1);
3016 tree high_positive;
3017
3018 /* A range without an upper bound is, naturally, unbounded.
3019 Since convert would have cropped a very large value, use
3020 the max value for the destination type. */
3021 high_positive
3022 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3023 : TYPE_MAX_VALUE (type);
3024
3025 high_positive = fold (build (RSHIFT_EXPR, type,
3026 convert (type, high_positive),
3027 convert (type, integer_one_node)));
3028
3029 /* If the low bound is specified, "and" the range with the
3030 range for which the original unsigned value will be
3031 positive. */
3032 if (low != 0)
3033 {
3034 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3035 1, n_low, n_high,
3036 1, convert (type, integer_zero_node),
3037 high_positive))
3038 break;
3039
3040 in_p = (n_in_p == in_p);
3041 }
3042 else
3043 {
3044 /* Otherwise, "or" the range with the range of the input
3045 that will be interpreted as negative. */
3046 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3047 0, n_low, n_high,
3048 1, convert (type, integer_zero_node),
3049 high_positive))
3050 break;
3051
3052 in_p = (in_p != n_in_p);
3053 }
3054 }
3055
3056 exp = arg0;
3057 low = n_low, high = n_high;
3058 continue;
3059
3060 default:
3061 break;
3062 }
3063
3064 break;
3065 }
3066
3067 /* If EXP is a constant, we can evaluate whether this is true or false. */
3068 if (TREE_CODE (exp) == INTEGER_CST)
3069 {
3070 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3071 exp, 0, low, 0))
3072 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3073 exp, 1, high, 1)));
3074 low = high = 0;
3075 exp = 0;
3076 }
3077
3078 *pin_p = in_p, *plow = low, *phigh = high;
3079 return exp;
3080 }
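
/* A worked example: for EXP == `(unsigned) x - 2 <= 3', the LE_EXPR
   case above records "+ [-, 3]", and the PLUS_EXPR/MINUS_EXPR case
   then adds 2 to both bounds, giving the range "+ [2, 5]" on X itself
   (the unsigned handling also folds in the implicit `>= 0' bound).  */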
3081 \f
3082 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3083 type, TYPE, return an expression to test if EXP is in (or out of, depending
3084 on IN_P) the range. */
3085
3086 static tree
3087 build_range_check (type, exp, in_p, low, high)
3088 tree type;
3089 tree exp;
3090 int in_p;
3091 tree low, high;
3092 {
3093 tree etype = TREE_TYPE (exp);
3094 tree value;
3095
3096 if (! in_p
3097 && (0 != (value = build_range_check (type, exp, 1, low, high))))
3098 return invert_truthvalue (value);
3099
3100 if (low == 0 && high == 0)
3101 return convert (type, integer_one_node);
3102
3103 if (low == 0)
3104 return fold (build (LE_EXPR, type, exp, high));
3105
3106 if (high == 0)
3107 return fold (build (GE_EXPR, type, exp, low));
3108
3109 if (operand_equal_p (low, high, 0))
3110 return fold (build (EQ_EXPR, type, exp, low));
3111
3112 if (integer_zerop (low))
3113 {
3114 if (! TREE_UNSIGNED (etype))
3115 {
3116 etype = (*lang_hooks.types.unsigned_type) (etype);
3117 high = convert (etype, high);
3118 exp = convert (etype, exp);
3119 }
3120 return build_range_check (type, exp, 1, 0, high);
3121 }
3122
3123 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3124 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3125 {
3126 unsigned HOST_WIDE_INT lo;
3127 HOST_WIDE_INT hi;
3128 int prec;
3129
3130 prec = TYPE_PRECISION (etype);
3131 if (prec <= HOST_BITS_PER_WIDE_INT)
3132 {
3133 hi = 0;
3134 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3135 }
3136 else
3137 {
3138 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3139 lo = (unsigned HOST_WIDE_INT) -1;
3140 }
3141
3142 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3143 {
3144 if (TREE_UNSIGNED (etype))
3145 {
3146 etype = (*lang_hooks.types.signed_type) (etype);
3147 exp = convert (etype, exp);
3148 }
3149 return fold (build (GT_EXPR, type, exp,
3150 convert (etype, integer_zero_node)));
3151 }
3152 }
3153
3154 if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3155 && ! TREE_OVERFLOW (value))
3156 return build_range_check (type,
3157 fold (build (MINUS_EXPR, etype, exp, low)),
3158 1, convert (etype, integer_zero_node), value);
3159
3160 return 0;
3161 }
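
/* Continuing the example above in the other direction:
   build_range_check for "+ [2, 5]" on X subtracts the low bound and
   produces `(unsigned) (x - 2) <= 3', so the two comparisons of
   `x >= 2 && x <= 5' become one.  */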
3162 \f
3163 /* Given two ranges, see if we can merge them into one. Return 1 if we
3164 can, 0 if we can't. Set the output range into the specified parameters. */
3165
3166 static int
3167 merge_ranges (pin_p, plow, phigh, in0_p, low0, high0, in1_p, low1, high1)
3168 int *pin_p;
3169 tree *plow, *phigh;
3170 int in0_p, in1_p;
3171 tree low0, high0, low1, high1;
3172 {
3173 int no_overlap;
3174 int subset;
3175 int temp;
3176 tree tem;
3177 int in_p;
3178 tree low, high;
3179 int lowequal = ((low0 == 0 && low1 == 0)
3180 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3181 low0, 0, low1, 0)));
3182 int highequal = ((high0 == 0 && high1 == 0)
3183 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3184 high0, 1, high1, 1)));
3185
3186 /* Make range 0 be the range that starts first, or ends last if they
3187 start at the same value. Swap them if this isn't the case. */
3188 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3189 low0, 0, low1, 0))
3190 || (lowequal
3191 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3192 high1, 1, high0, 1))))
3193 {
3194 temp = in0_p, in0_p = in1_p, in1_p = temp;
3195 tem = low0, low0 = low1, low1 = tem;
3196 tem = high0, high0 = high1, high1 = tem;
3197 }
3198
3199 /* Now flag two cases, whether the ranges are disjoint or whether the
3200 second range is totally subsumed in the first. Note that the tests
3201 below are simplified by the ones above. */
3202 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3203 high0, 1, low1, 0));
3204 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3205 high1, 1, high0, 1));
3206
3207 /* We now have four cases, depending on whether we are including or
3208 excluding the two ranges. */
3209 if (in0_p && in1_p)
3210 {
3211 /* If they don't overlap, the result is false. If the second range
3212 is a subset it is the result. Otherwise, the range is from the start
3213 of the second to the end of the first. */
3214 if (no_overlap)
3215 in_p = 0, low = high = 0;
3216 else if (subset)
3217 in_p = 1, low = low1, high = high1;
3218 else
3219 in_p = 1, low = low1, high = high0;
3220 }
3221
3222 else if (in0_p && ! in1_p)
3223 {
3224 /* If they don't overlap, the result is the first range. If they are
3225 equal, the result is false. If the second range is a subset of the
3226 first, and the ranges begin at the same place, we go from just after
3227 the end of the first range to the end of the second. If the second
3228 range is not a subset of the first, or if it is a subset and both
3229 ranges end at the same place, the range starts at the start of the
3230 first range and ends just before the second range.
3231 Otherwise, we can't describe this as a single range. */
3232 if (no_overlap)
3233 in_p = 1, low = low0, high = high0;
3234 else if (lowequal && highequal)
3235 in_p = 0, low = high = 0;
3236 else if (subset && lowequal)
3237 {
3238 in_p = 1, high = high0;
3239 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3240 integer_one_node, 0);
3241 }
3242 else if (! subset || highequal)
3243 {
3244 in_p = 1, low = low0;
3245 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3246 integer_one_node, 0);
3247 }
3248 else
3249 return 0;
3250 }
3251
3252 else if (! in0_p && in1_p)
3253 {
3254 /* If they don't overlap, the result is the second range. If the second
3255 is a subset of the first, the result is false. Otherwise,
3256 the range starts just after the first range and ends at the
3257 end of the second. */
3258 if (no_overlap)
3259 in_p = 1, low = low1, high = high1;
3260 else if (subset || highequal)
3261 in_p = 0, low = high = 0;
3262 else
3263 {
3264 in_p = 1, high = high1;
3265 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3266 integer_one_node, 0);
3267 }
3268 }
3269
3270 else
3271 {
3272 /* The case where we are excluding both ranges. Here the complex case
3273 is if they don't overlap. In that case, the only time we have a
3274 range is if they are adjacent. If the second is a subset of the
3275 first, the result is the first. Otherwise, the range to exclude
3276 starts at the beginning of the first range and ends at the end of the
3277 second. */
3278 if (no_overlap)
3279 {
3280 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3281 range_binop (PLUS_EXPR, NULL_TREE,
3282 high0, 1,
3283 integer_one_node, 1),
3284 1, low1, 0)))
3285 in_p = 0, low = low0, high = high1;
3286 else
3287 return 0;
3288 }
3289 else if (subset)
3290 in_p = 0, low = low0, high = high0;
3291 else
3292 in_p = 0, low = low0, high = high1;
3293 }
3294
3295 *pin_p = in_p, *plow = low, *phigh = high;
3296 return 1;
3297 }
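
/* For example, merging "+ [2, 5]" with "+ [4, 9]" gives "+ [4, 5]":
   the ranges overlap and neither includes the other, so the result
   runs from the start of the second range to the end of the first.  */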
3298 \f
3299 /* EXP is some logical combination of boolean tests. See if we can
3300 merge it into some range test. Return the new tree if so. */
3301
3302 static tree
3303 fold_range_test (exp)
3304 tree exp;
3305 {
3306 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3307 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3308 int in0_p, in1_p, in_p;
3309 tree low0, low1, low, high0, high1, high;
3310 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3311 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3312 tree tem;
3313
3314 /* If this is an OR operation, invert both sides; we will invert
3315 again at the end. */
3316 if (or_op)
3317 in0_p = ! in0_p, in1_p = ! in1_p;
3318
3319 /* If both expressions are the same, if we can merge the ranges, and we
3320 can build the range test, return it or it inverted. If one of the
3321 ranges is always true or always false, consider it to be the same
3322 expression as the other. */
3323 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3324 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3325 in1_p, low1, high1)
3326 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3327 lhs != 0 ? lhs
3328 : rhs != 0 ? rhs : integer_zero_node,
3329 in_p, low, high))))
3330 return or_op ? invert_truthvalue (tem) : tem;
3331
3332 /* On machines where branches are expensive, if this is a
3333 short-circuited branch and the underlying object on both sides
3334 is the same, make a non-short-circuit operation. */
3335 else if (BRANCH_COST >= 2
3336 && lhs != 0 && rhs != 0
3337 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3338 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3339 && operand_equal_p (lhs, rhs, 0))
3340 {
3341 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3342 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
3343 which cases we can't do this. */
3344 if (simple_operand_p (lhs))
3345 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3346 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3347 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3348 TREE_OPERAND (exp, 1));
3349
3350 else if ((*lang_hooks.decls.global_bindings_p) () == 0
3351 && ! contains_placeholder_p (lhs))
3352 {
3353 tree common = save_expr (lhs);
3354
3355 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3356 or_op ? ! in0_p : in0_p,
3357 low0, high0))
3358 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3359 or_op ? ! in1_p : in1_p,
3360 low1, high1))))
3361 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3362 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3363 TREE_TYPE (exp), lhs, rhs);
3364 }
3365 }
3366
3367 return 0;
3368 }
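
/* So `ch >= '0' && ch <= '9'' becomes a single range check, while a
   test such as `a >= 0 && a != 5' (whose two ranges cannot be merged
   into one) can still, on a BRANCH_COST >= 2 target, be rewritten with
   TRUTH_AND_EXPR so that both subtests are evaluated without a
   conditional branch between them.  */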
3369 \f
3370 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3371 bit value. Arrange things so the extra bits will be set to zero if and
3372 only if C is sign-extended to its full width. If MASK is nonzero,
3373 it is an INTEGER_CST that should be AND'ed with the extra bits. */
3374
3375 static tree
3376 unextend (c, p, unsignedp, mask)
3377 tree c;
3378 int p;
3379 int unsignedp;
3380 tree mask;
3381 {
3382 tree type = TREE_TYPE (c);
3383 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3384 tree temp;
3385
3386 if (p == modesize || unsignedp)
3387 return c;
3388
3389 /* We work by getting just the sign bit into the low-order bit, then
3390 into the high-order bit, then sign-extend. We then XOR that value
3391 with C. */
3392 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3393 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3394
3395 /* We must use a signed type in order to get an arithmetic right shift.
3396 However, we must also avoid introducing accidental overflows, so that
3397 a subsequent call to integer_zerop will work. Hence we must
3398 do the type conversion here. At this point, the constant is either
3399 zero or one, and the conversion to a signed type can never overflow.
3400 We could get an overflow if this conversion is done anywhere else. */
3401 if (TREE_UNSIGNED (type))
3402 temp = convert ((*lang_hooks.types.signed_type) (type), temp);
3403
3404 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3405 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3406 if (mask != 0)
3407 temp = const_binop (BIT_AND_EXPR, temp, convert (TREE_TYPE (c), mask), 0);
3408 /* If necessary, convert the type back to match the type of C. */
3409 if (TREE_UNSIGNED (type))
3410 temp = convert (type, temp);
3411
3412 return convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
3413 }
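
/* A worked example, assuming a signed 32-bit C: for P == 8 and
   UNSIGNEDP == 0, with C == 0xffffff80 (the sign-extension of the
   field pattern 0x80), TEMP becomes 0xffffff00 after the shifts above,
   and the final XOR yields 0x00000080 -- the raw field bits.  A C that
   was not properly sign-extended would keep nonzero extra bits and
   later trigger the "comparison is always" warnings in fold_truthop.  */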
3414 \f
3415 /* Find ways of folding logical expressions of LHS and RHS:
3416 Try to merge two comparisons to the same innermost item.
3417 Look for range tests like "ch >= '0' && ch <= '9'".
3418 Look for combinations of simple terms on machines with expensive branches
3419 and evaluate the RHS unconditionally.
3420
3421 For example, if we have p->a == 2 && p->b == 4 and we can make an
3422 object large enough to span both A and B, we can do this with a comparison
3423 against the object ANDed with the a mask.
3424
3425 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3426 operations to do this with one comparison.
3427
3428 We check for both normal comparisons and the BIT_AND_EXPRs made by this
3429 function and the one above.
3430
3431 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3432 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3433
3434 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
3435 two operands.
3436
3437 We return the simplified tree or 0 if no optimization is possible. */
3438
3439 static tree
3440 fold_truthop (code, truth_type, lhs, rhs)
3441 enum tree_code code;
3442 tree truth_type, lhs, rhs;
3443 {
3444 /* If this is the "or" of two comparisons, we can do something if
3445 the comparisons are NE_EXPR. If this is the "and", we can do something
3446 if the comparisons are EQ_EXPR. I.e.,
3447 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3448
3449 WANTED_CODE is this operation code. For single bit fields, we can
3450 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3451 comparison for one-bit fields. */
3452
3453 enum tree_code wanted_code;
3454 enum tree_code lcode, rcode;
3455 tree ll_arg, lr_arg, rl_arg, rr_arg;
3456 tree ll_inner, lr_inner, rl_inner, rr_inner;
3457 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3458 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3459 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3460 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3461 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3462 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3463 enum machine_mode lnmode, rnmode;
3464 tree ll_mask, lr_mask, rl_mask, rr_mask;
3465 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3466 tree l_const, r_const;
3467 tree lntype, rntype, result;
3468 int first_bit, end_bit;
3469 int volatilep;
3470
3471 /* Start by getting the comparison codes. Fail if anything is volatile.
3472 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3473 it were surrounded with a NE_EXPR. */
3474
3475 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3476 return 0;
3477
3478 lcode = TREE_CODE (lhs);
3479 rcode = TREE_CODE (rhs);
3480
3481 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3482 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3483
3484 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3485 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3486
3487 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3488 return 0;
3489
3490 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3491 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3492
3493 ll_arg = TREE_OPERAND (lhs, 0);
3494 lr_arg = TREE_OPERAND (lhs, 1);
3495 rl_arg = TREE_OPERAND (rhs, 0);
3496 rr_arg = TREE_OPERAND (rhs, 1);
3497
3498 /* If the RHS can be evaluated unconditionally and its operands are
3499 simple, it wins to evaluate the RHS unconditionally on machines
3500 with expensive branches. In this case, this isn't a comparison
3501 that can be merged. Avoid doing this if the RHS is a floating-point
3502 comparison since those can trap. */
3503
3504 if (BRANCH_COST >= 2
3505 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
3506 && simple_operand_p (rl_arg)
3507 && simple_operand_p (rr_arg))
3508 return build (code, truth_type, lhs, rhs);
3509
3510 /* See if the comparisons can be merged. Then get all the parameters for
3511 each side. */
3512
3513 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
3514 || (rcode != EQ_EXPR && rcode != NE_EXPR))
3515 return 0;
3516
3517 volatilep = 0;
3518 ll_inner = decode_field_reference (ll_arg,
3519 &ll_bitsize, &ll_bitpos, &ll_mode,
3520 &ll_unsignedp, &volatilep, &ll_mask,
3521 &ll_and_mask);
3522 lr_inner = decode_field_reference (lr_arg,
3523 &lr_bitsize, &lr_bitpos, &lr_mode,
3524 &lr_unsignedp, &volatilep, &lr_mask,
3525 &lr_and_mask);
3526 rl_inner = decode_field_reference (rl_arg,
3527 &rl_bitsize, &rl_bitpos, &rl_mode,
3528 &rl_unsignedp, &volatilep, &rl_mask,
3529 &rl_and_mask);
3530 rr_inner = decode_field_reference (rr_arg,
3531 &rr_bitsize, &rr_bitpos, &rr_mode,
3532 &rr_unsignedp, &volatilep, &rr_mask,
3533 &rr_and_mask);
3534
3535 /* The inner operation on the lhs of each comparison must be the
3536 same if we are to be able to do anything.
3537 Then see if we have constants. If not, the same must be true for
3538 the rhs's. */
3539 if (volatilep || ll_inner == 0 || rl_inner == 0
3540 || ! operand_equal_p (ll_inner, rl_inner, 0))
3541 return 0;
3542
3543 if (TREE_CODE (lr_arg) == INTEGER_CST
3544 && TREE_CODE (rr_arg) == INTEGER_CST)
3545 l_const = lr_arg, r_const = rr_arg;
3546 else if (lr_inner == 0 || rr_inner == 0
3547 || ! operand_equal_p (lr_inner, rr_inner, 0))
3548 return 0;
3549 else
3550 l_const = r_const = 0;
3551
3552 /* If either comparison code is not correct for our logical operation,
3553 fail. However, we can convert a one-bit comparison against zero into
3554 the opposite comparison against that bit being set in the field. */
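/* Illustrative (hypothetical operands): with a one-bit mask,
`(x & 8) != 0' is equivalent to `(x & 8) == 8', so when the wanted
code is EQ_EXPR we can substitute the mask for the zero constant
and proceed as if the comparison had been written that way. */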
3555
3556 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
3557 if (lcode != wanted_code)
3558 {
3559 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
3560 {
3561 /* Make the left operand unsigned, since we are only interested
3562 in the value of one bit. Otherwise we are doing the wrong
3563 thing below. */
3564 ll_unsignedp = 1;
3565 l_const = ll_mask;
3566 }
3567 else
3568 return 0;
3569 }
3570
3571 /* This is analogous to the code for l_const above. */
3572 if (rcode != wanted_code)
3573 {
3574 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
3575 {
3576 rl_unsignedp = 1;
3577 r_const = rl_mask;
3578 }
3579 else
3580 return 0;
3581 }
3582
3583 /* See if we can find a mode that contains both fields being compared on
3584 the left. If we can't, fail. Otherwise, update all constants and masks
3585 to be relative to a field of that size. */
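/* For instance (an illustrative layout): two 3-bit fields at bit
positions 1 and 9 span bits 1..11, so a 16-bit mode would cover both
on a typical target; if no suitable mode exists, we give up. */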
3586 first_bit = MIN (ll_bitpos, rl_bitpos);
3587 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
3588 lnmode = get_best_mode (end_bit - first_bit, first_bit,
3589 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
3590 volatilep);
3591 if (lnmode == VOIDmode)
3592 return 0;
3593
3594 lnbitsize = GET_MODE_BITSIZE (lnmode);
3595 lnbitpos = first_bit & ~ (lnbitsize - 1);
3596 lntype = (*lang_hooks.types.type_for_size) (lnbitsize, 1);
3597 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
3598
3599 if (BYTES_BIG_ENDIAN)
3600 {
3601 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
3602 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
3603 }
3604
3605 ll_mask = const_binop (LSHIFT_EXPR, convert (lntype, ll_mask),
3606 size_int (xll_bitpos), 0);
3607 rl_mask = const_binop (LSHIFT_EXPR, convert (lntype, rl_mask),
3608 size_int (xrl_bitpos), 0);
3609
3610 if (l_const)
3611 {
3612 l_const = convert (lntype, l_const);
3613 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
3614 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
3615 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
3616 fold (build1 (BIT_NOT_EXPR,
3617 lntype, ll_mask)),
3618 0)))
3619 {
3620 warning ("comparison is always %d", wanted_code == NE_EXPR);
3621
3622 return convert (truth_type,
3623 wanted_code == NE_EXPR
3624 ? integer_one_node : integer_zero_node);
3625 }
3626 }
3627 if (r_const)
3628 {
3629 r_const = convert (lntype, r_const);
3630 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
3631 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
3632 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
3633 fold (build1 (BIT_NOT_EXPR,
3634 lntype, rl_mask)),
3635 0)))
3636 {
3637 warning ("comparison is always %d", wanted_code == NE_EXPR);
3638
3639 return convert (truth_type,
3640 wanted_code == NE_EXPR
3641 ? integer_one_node : integer_zero_node);
3642 }
3643 }
3644
3645 /* If the right sides are not constant, do the same for them. Also,
3646 disallow this optimization if a size or signedness mismatch occurs
3647 between the left and right sides. */
3648 if (l_const == 0)
3649 {
3650 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
3651 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
3652 /* Make sure the two fields on the right
3653 correspond to the left without being swapped. */
3654 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
3655 return 0;
3656
3657 first_bit = MIN (lr_bitpos, rr_bitpos);
3658 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
3659 rnmode = get_best_mode (end_bit - first_bit, first_bit,
3660 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
3661 volatilep);
3662 if (rnmode == VOIDmode)
3663 return 0;
3664
3665 rnbitsize = GET_MODE_BITSIZE (rnmode);
3666 rnbitpos = first_bit & ~ (rnbitsize - 1);
3667 rntype = (*lang_hooks.types.type_for_size) (rnbitsize, 1);
3668 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
3669
3670 if (BYTES_BIG_ENDIAN)
3671 {
3672 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
3673 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
3674 }
3675
3676 lr_mask = const_binop (LSHIFT_EXPR, convert (rntype, lr_mask),
3677 size_int (xlr_bitpos), 0);
3678 rr_mask = const_binop (LSHIFT_EXPR, convert (rntype, rr_mask),
3679 size_int (xrr_bitpos), 0);
3680
3681 /* Make a mask that corresponds to both fields being compared.
3682 Do this for both items being compared. If the operands are the
3683 same size and the bits being compared are in the same position
3684 then we can do this by masking both and comparing the masked
3685 results. */
3686 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3687 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
3688 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
3689 {
3690 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
3691 ll_unsignedp || rl_unsignedp);
3692 if (! all_ones_mask_p (ll_mask, lnbitsize))
3693 lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
3694
3695 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
3696 lr_unsignedp || rr_unsignedp);
3697 if (! all_ones_mask_p (lr_mask, rnbitsize))
3698 rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
3699
3700 return build (wanted_code, truth_type, lhs, rhs);
3701 }
3702
3703 /* There is still another way we can do something: If both pairs of
3704 fields being compared are adjacent, we may be able to make a wider
3705 field containing them both.
3706
3707 Note that we still must mask the lhs/rhs expressions. Furthermore,
3708 the mask must be shifted to account for the shift done by
3709 make_bit_field_ref. */
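/* Illustrative (hypothetical fields): if `a' occupies bits 0..3 and
`b' bits 4..7 of the same word, then `x.a == y.a && x.b == y.b' can
be tested as one 8-bit comparison of bits 0..7 on each side. */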
3710 if ((ll_bitsize + ll_bitpos == rl_bitpos
3711 && lr_bitsize + lr_bitpos == rr_bitpos)
3712 || (ll_bitpos == rl_bitpos + rl_bitsize
3713 && lr_bitpos == rr_bitpos + rr_bitsize))
3714 {
3715 tree type;
3716
3717 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
3718 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
3719 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
3720 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
3721
3722 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
3723 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
3724 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
3725 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
3726
3727 /* Convert to the smaller type before masking out unwanted bits. */
3728 type = lntype;
3729 if (lntype != rntype)
3730 {
3731 if (lnbitsize > rnbitsize)
3732 {
3733 lhs = convert (rntype, lhs);
3734 ll_mask = convert (rntype, ll_mask);
3735 type = rntype;
3736 }
3737 else if (lnbitsize < rnbitsize)
3738 {
3739 rhs = convert (lntype, rhs);
3740 lr_mask = convert (lntype, lr_mask);
3741 type = lntype;
3742 }
3743 }
3744
3745 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
3746 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
3747
3748 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
3749 rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
3750
3751 return build (wanted_code, truth_type, lhs, rhs);
3752 }
3753
3754 return 0;
3755 }
3756
3757 /* Handle the case of comparisons with constants. If there is something in
3758 common between the masks, those bits of the constants must be the same.
3759 If not, the condition is always false. Test for this to avoid generating
3760 incorrect code below. */
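/* Hypothetical example: in `(x & 3) == 1 && (x & 5) == 4' the masks
share bit 0, and the constants disagree there (1 vs. 0), so the
AND of the two tests can never be true. */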
3761 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
3762 if (! integer_zerop (result)
3763 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
3764 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
3765 {
3766 if (wanted_code == NE_EXPR)
3767 {
3768 warning ("`or' of unmatched not-equal tests is always 1");
3769 return convert (truth_type, integer_one_node);
3770 }
3771 else
3772 {
3773 warning ("`and' of mutually exclusive equal-tests is always 0");
3774 return convert (truth_type, integer_zero_node);
3775 }
3776 }
3777
3778 /* Construct the expression we will return. First get the component
3779 reference we will make. Unless the mask is all ones the width of
3780 that field, perform the mask operation. Then compare with the
3781 merged constant. */
3782 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
3783 ll_unsignedp || rl_unsignedp);
3784
3785 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3786 if (! all_ones_mask_p (ll_mask, lnbitsize))
3787 result = build (BIT_AND_EXPR, lntype, result, ll_mask);
3788
3789 return build (wanted_code, truth_type, result,
3790 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
3791 }
3792 \f
3793 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
3794 constant. */
3795
3796 static tree
3797 optimize_minmax_comparison (t)
3798 tree t;
3799 {
3800 tree type = TREE_TYPE (t);
3801 tree arg0 = TREE_OPERAND (t, 0);
3802 enum tree_code op_code;
3803 tree comp_const = TREE_OPERAND (t, 1);
3804 tree minmax_const;
3805 int consts_equal, consts_lt;
3806 tree inner;
3807
3808 STRIP_SIGN_NOPS (arg0);
3809
3810 op_code = TREE_CODE (arg0);
3811 minmax_const = TREE_OPERAND (arg0, 1);
3812 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
3813 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
3814 inner = TREE_OPERAND (arg0, 0);
3815
3816 /* If something does not permit us to optimize, return the original tree. */
3817 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
3818 || TREE_CODE (comp_const) != INTEGER_CST
3819 || TREE_CONSTANT_OVERFLOW (comp_const)
3820 || TREE_CODE (minmax_const) != INTEGER_CST
3821 || TREE_CONSTANT_OVERFLOW (minmax_const))
3822 return t;
3823
3824 /* Now handle all the various comparison codes. We only handle EQ_EXPR
3825 and GT_EXPR, doing the rest with recursive calls using logical
3826 simplifications. */
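/* An illustrative trace: `MIN (X, 4) < 4' is rewritten as
`!(MIN (X, 4) >= 4)', and the GE_EXPR is in turn split into the
EQ_EXPR and GT_EXPR cases handled below. */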
3827 switch (TREE_CODE (t))
3828 {
3829 case NE_EXPR: case LT_EXPR: case LE_EXPR:
3830 return
3831 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
3832
3833 case GE_EXPR:
3834 return
3835 fold (build (TRUTH_ORIF_EXPR, type,
3836 optimize_minmax_comparison
3837 (build (EQ_EXPR, type, arg0, comp_const)),
3838 optimize_minmax_comparison
3839 (build (GT_EXPR, type, arg0, comp_const))));
3840
3841 case EQ_EXPR:
3842 if (op_code == MAX_EXPR && consts_equal)
3843 /* MAX (X, 0) == 0 -> X <= 0 */
3844 return fold (build (LE_EXPR, type, inner, comp_const));
3845
3846 else if (op_code == MAX_EXPR && consts_lt)
3847 /* MAX (X, 0) == 5 -> X == 5 */
3848 return fold (build (EQ_EXPR, type, inner, comp_const));
3849
3850 else if (op_code == MAX_EXPR)
3851 /* MAX (X, 0) == -1 -> false */
3852 return omit_one_operand (type, integer_zero_node, inner);
3853
3854 else if (consts_equal)
3855 /* MIN (X, 0) == 0 -> X >= 0 */
3856 return fold (build (GE_EXPR, type, inner, comp_const));
3857
3858 else if (consts_lt)
3859 /* MIN (X, 0) == 5 -> false */
3860 return omit_one_operand (type, integer_zero_node, inner);
3861
3862 else
3863 /* MIN (X, 0) == -1 -> X == -1 */
3864 return fold (build (EQ_EXPR, type, inner, comp_const));
3865
3866 case GT_EXPR:
3867 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
3868 /* MAX (X, 0) > 0 -> X > 0
3869 MAX (X, 0) > 5 -> X > 5 */
3870 return fold (build (GT_EXPR, type, inner, comp_const));
3871
3872 else if (op_code == MAX_EXPR)
3873 /* MAX (X, 0) > -1 -> true */
3874 return omit_one_operand (type, integer_one_node, inner);
3875
3876 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
3877 /* MIN (X, 0) > 0 -> false
3878 MIN (X, 0) > 5 -> false */
3879 return omit_one_operand (type, integer_zero_node, inner);
3880
3881 else
3882 /* MIN (X, 0) > -1 -> X > -1 */
3883 return fold (build (GT_EXPR, type, inner, comp_const));
3884
3885 default:
3886 return t;
3887 }
3888 }
3889 \f
3890 /* T is an integer expression that is being multiplied by, divided by, or
3891 reduced modulo a constant C (CODE says which operation and what kind of
3892 divide or modulus). See if we can eliminate that operation by folding it with
3893 other operations already in T. WIDE_TYPE, if non-null, is a type that
3894 should be used for the computation if wider than our type.
3895
3896 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
3897 (X * 2) + (Y * 4). We must, however, be assured that either the original
3898 expression would not overflow or that overflow is undefined for the type
3899 in the language in question.
3900
3901 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
3902 the machine has a multiply-accumulate insn or that this is part of an
3903 addressing calculation.
3904
3905 If we return a non-null expression, it is an equivalent form of the
3906 original computation, but need not be in the original type. */
3907
3908 static tree
3909 extract_muldiv (t, c, code, wide_type)
3910 tree t;
3911 tree c;
3912 enum tree_code code;
3913 tree wide_type;
3914 {
3915 tree type = TREE_TYPE (t);
3916 enum tree_code tcode = TREE_CODE (t);
3917 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
3918 > GET_MODE_SIZE (TYPE_MODE (type)))
3919 ? wide_type : type);
3920 tree t1, t2;
3921 int same_p = tcode == code;
3922 tree op0 = NULL_TREE, op1 = NULL_TREE;
3923
3924 /* Don't deal with constants of zero here; they confuse the code below. */
3925 if (integer_zerop (c))
3926 return NULL_TREE;
3927
3928 if (TREE_CODE_CLASS (tcode) == '1')
3929 op0 = TREE_OPERAND (t, 0);
3930
3931 if (TREE_CODE_CLASS (tcode) == '2')
3932 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
3933
3934 /* Note that we need not handle conditional operations here since fold
3935 already handles those cases. So just do arithmetic here. */
3936 switch (tcode)
3937 {
3938 case INTEGER_CST:
3939 /* For a constant, we can always simplify if we are a multiply
3940 or (for divide and modulus) if it is a multiple of our constant. */
3941 if (code == MULT_EXPR
3942 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
3943 return const_binop (code, convert (ctype, t), convert (ctype, c), 0);
3944 break;
3945
3946 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
3947 /* If op0 is an expression, and is unsigned, and the type is
3948 smaller than ctype, then we cannot widen the expression. */
3949 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
3950 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
3951 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
3952 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
3953 && TREE_UNSIGNED (TREE_TYPE (op0))
3954 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
3955 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
3956 && (GET_MODE_SIZE (TYPE_MODE (ctype))
3957 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
3958 break;
3959
3960 /* Pass the constant down and see if we can make a simplification. If
3961 we can, replace this expression with the inner simplification for
3962 possible later conversion to our type or some other type. */
3963 if (0 != (t1 = extract_muldiv (op0, convert (TREE_TYPE (op0), c), code,
3964 code == MULT_EXPR ? ctype : NULL_TREE)))
3965 return t1;
3966 break;
3967
3968 case NEGATE_EXPR: case ABS_EXPR:
3969 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
3970 return fold (build1 (tcode, ctype, convert (ctype, t1)));
3971 break;
3972
3973 case MIN_EXPR: case MAX_EXPR:
3974 /* If widening the type changes the signedness, then we can't perform
3975 this optimization as that changes the result. */
3976 if (TREE_UNSIGNED (ctype) != TREE_UNSIGNED (type))
3977 break;
3978
3979 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
3980 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
3981 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
3982 {
3983 if (tree_int_cst_sgn (c) < 0)
3984 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
3985
3986 return fold (build (tcode, ctype, convert (ctype, t1),
3987 convert (ctype, t2)));
3988 }
3989 break;
3990
3991 case WITH_RECORD_EXPR:
3992 if ((t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code, wide_type)) != 0)
3993 return build (WITH_RECORD_EXPR, TREE_TYPE (t1), t1,
3994 TREE_OPERAND (t, 1));
3995 break;
3996
3997 case SAVE_EXPR:
3998 /* If this has not been evaluated and the operand has no side effects,
3999 we can see if we can do something inside it and make a new one.
4000 Note that this test is overly conservative since we can do this
4001 if the only reason it had side effects is that it was another
4002 similar SAVE_EXPR, but that isn't worth bothering with. */
4003 if (SAVE_EXPR_RTL (t) == 0 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0))
4004 && 0 != (t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code,
4005 wide_type)))
4006 {
4007 t1 = save_expr (t1);
4008 if (SAVE_EXPR_PERSISTENT_P (t) && TREE_CODE (t1) == SAVE_EXPR)
4009 SAVE_EXPR_PERSISTENT_P (t1) = 1;
4010 if (is_pending_size (t))
4011 put_pending_size (t1);
4012 return t1;
4013 }
4014 break;
4015
4016 case LSHIFT_EXPR: case RSHIFT_EXPR:
4017 /* If the second operand is constant, this is a multiplication
4018 or floor division by a power of two, so we can treat it that
4019 way unless the multiplier or divisor overflows. */
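/* Illustrative (hypothetical shift counts): `(X << 3)' is handled
as `X * 8', and `(X >> 2)' as the floor division `X / 4'. */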
4020 if (TREE_CODE (op1) == INTEGER_CST
4021 /* const_binop may not detect overflow correctly,
4022 so check for it explicitly here. */
4023 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
4024 && TREE_INT_CST_HIGH (op1) == 0
4025 && 0 != (t1 = convert (ctype,
4026 const_binop (LSHIFT_EXPR, size_one_node,
4027 op1, 0)))
4028 && ! TREE_OVERFLOW (t1))
4029 return extract_muldiv (build (tcode == LSHIFT_EXPR
4030 ? MULT_EXPR : FLOOR_DIV_EXPR,
4031 ctype, convert (ctype, op0), t1),
4032 c, code, wide_type);
4033 break;
4034
4035 case PLUS_EXPR: case MINUS_EXPR:
4036 /* See if we can eliminate the operation on both sides. If we can, we
4037 can return a new PLUS or MINUS. If we can't, the only remaining
4038 case where we can do anything is when the second operand is a
4039 constant. */
4040 t1 = extract_muldiv (op0, c, code, wide_type);
4041 t2 = extract_muldiv (op1, c, code, wide_type);
4042 if (t1 != 0 && t2 != 0
4043 && (code == MULT_EXPR
4044 /* If not multiplication, we can only do this if either operand
4045 is divisible by c. */
4046 || multiple_of_p (ctype, op0, c)
4047 || multiple_of_p (ctype, op1, c)))
4048 return fold (build (tcode, ctype, convert (ctype, t1),
4049 convert (ctype, t2)));
4050
4051 /* If this was a subtraction, negate OP1 and set it to be an addition.
4052 This simplifies the logic below. */
4053 if (tcode == MINUS_EXPR)
4054 tcode = PLUS_EXPR, op1 = negate_expr (op1);
4055
4056 if (TREE_CODE (op1) != INTEGER_CST)
4057 break;
4058
4059 /* If either OP1 or C is negative, this optimization is not safe for
4060 some of the division and remainder types, while for others we need
4061 to change the code. */
4062 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
4063 {
4064 if (code == CEIL_DIV_EXPR)
4065 code = FLOOR_DIV_EXPR;
4066 else if (code == FLOOR_DIV_EXPR)
4067 code = CEIL_DIV_EXPR;
4068 else if (code != MULT_EXPR
4069 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
4070 break;
4071 }
4072
4073 /* If it's a multiply or a division/modulus operation of a multiple
4074 of our constant, do the operation and verify it doesn't overflow. */
4075 if (code == MULT_EXPR
4076 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4077 {
4078 op1 = const_binop (code, convert (ctype, op1), convert (ctype, c), 0);
4079 if (op1 == 0 || TREE_OVERFLOW (op1))
4080 break;
4081 }
4082 else
4083 break;
4084
4085 /* If we have an unsigned type that is not a sizetype, we cannot widen
4086 the operation since it will change the result if the original
4087 computation overflowed. */
4088 if (TREE_UNSIGNED (ctype)
4089 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
4090 && ctype != type)
4091 break;
4092
4093 /* If we were able to eliminate our operation from the first side,
4094 apply our operation to the second side and reform the PLUS. */
4095 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4096 return fold (build (tcode, ctype, convert (ctype, t1), op1));
4097
4098 /* The last case is if we are a multiply. In that case, we can
4099 apply the distributive law to commute the multiply and addition
4100 if the multiplication of the constants doesn't overflow. */
4101 if (code == MULT_EXPR)
4102 return fold (build (tcode, ctype, fold (build (code, ctype,
4103 convert (ctype, op0),
4104 convert (ctype, c))),
4105 op1));
4106
4107 break;
4108
4109 case MULT_EXPR:
4110 /* We have a special case here if we are doing something like
4111 (C * 8) % 4 since we know that's zero. */
4112 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4113 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4114 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4115 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4116 return omit_one_operand (type, integer_zero_node, op0);
4117
4118 /* ... fall through ... */
4119
4120 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
4121 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
4122 /* If we can extract our operation from the LHS, do so and return a
4123 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
4124 do something only if the second operand is a constant. */
4125 if (same_p
4126 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4127 return fold (build (tcode, ctype, convert (ctype, t1),
4128 convert (ctype, op1)));
4129 else if (tcode == MULT_EXPR && code == MULT_EXPR
4130 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4131 return fold (build (tcode, ctype, convert (ctype, op0),
4132 convert (ctype, t1)));
4133 else if (TREE_CODE (op1) != INTEGER_CST)
4134 return 0;
4135
4136 /* If these are the same operation types, we can associate them
4137 assuming no overflow. */
4138 if (tcode == code
4139 && 0 != (t1 = const_binop (MULT_EXPR, convert (ctype, op1),
4140 convert (ctype, c), 0))
4141 && ! TREE_OVERFLOW (t1))
4142 return fold (build (tcode, ctype, convert (ctype, op0), t1));
4143
4144 /* If these operations "cancel" each other, we have the main
4145 optimizations of this pass, which occur when either constant is a
4146 multiple of the other, in which case we replace this with an
4147 operation of CODE or TCODE.
4148
4149 If we have an unsigned type that is not a sizetype, we cannot do
4150 this since it will change the result if the original computation
4151 overflowed. */
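/* Illustrative instances: `(X * 12) / 4' cancels to `X * 3'
(op1 divisible by c), while `(X * 4) / 12' cancels to `X / 3'
(c divisible by op1). */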
4152 if ((! TREE_UNSIGNED (ctype)
4153 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
4154 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4155 || (tcode == MULT_EXPR
4156 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4157 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
4158 {
4159 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4160 return fold (build (tcode, ctype, convert (ctype, op0),
4161 convert (ctype,
4162 const_binop (TRUNC_DIV_EXPR,
4163 op1, c, 0))));
4164 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4165 return fold (build (code, ctype, convert (ctype, op0),
4166 convert (ctype,
4167 const_binop (TRUNC_DIV_EXPR,
4168 c, op1, 0))));
4169 }
4170 break;
4171
4172 default:
4173 break;
4174 }
4175
4176 return 0;
4177 }
4178 \f
4179 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4180 S, a SAVE_EXPR, return the expression actually being evaluated. Note
4181 that we may sometimes modify the tree. */
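/* Illustrative (hypothetical S and E): if T is `((void) S, E)' for
the given SAVE_EXPR S, the result is just E. */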
4182
4183 static tree
4184 strip_compound_expr (t, s)
4185 tree t;
4186 tree s;
4187 {
4188 enum tree_code code = TREE_CODE (t);
4189
4190 /* See if this is the COMPOUND_EXPR we want to eliminate. */
4191 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4192 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4193 return TREE_OPERAND (t, 1);
4194
4195 /* See if this is a COND_EXPR or a simple arithmetic operator. We
4196 don't bother handling any other types. */
4197 else if (code == COND_EXPR)
4198 {
4199 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4200 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4201 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
4202 }
4203 else if (TREE_CODE_CLASS (code) == '1')
4204 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4205 else if (TREE_CODE_CLASS (code) == '<'
4206 || TREE_CODE_CLASS (code) == '2')
4207 {
4208 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4209 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4210 }
4211
4212 return t;
4213 }
4214 \f
4215 /* Return a node which has the indicated constant VALUE (either 0 or
4216 1), and is of the indicated TYPE. */
4217
4218 static tree
4219 constant_boolean_node (value, type)
4220 int value;
4221 tree type;
4222 {
4223 if (type == integer_type_node)
4224 return value ? integer_one_node : integer_zero_node;
4225 else if (TREE_CODE (type) == BOOLEAN_TYPE)
4226 return (*lang_hooks.truthvalue_conversion) (value ? integer_one_node :
4227 integer_zero_node);
4228 else
4229 {
4230 tree t = build_int_2 (value, 0);
4231
4232 TREE_TYPE (t) = type;
4233 return t;
4234 }
4235 }
4236
4237 /* Utility function for the following routine, to see how complex a nesting of
4238 COND_EXPRs can be. EXPR is the expression and LIM is a count beyond which
4239 we don't care (to avoid spending too much time on complex expressions). */
4240
4241 static int
4242 count_cond (expr, lim)
4243 tree expr;
4244 int lim;
4245 {
4246 int ctrue, cfalse;
4247
4248 if (TREE_CODE (expr) != COND_EXPR)
4249 return 0;
4250 else if (lim <= 0)
4251 return 0;
4252
4253 ctrue = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4254 cfalse = count_cond (TREE_OPERAND (expr, 2), lim - 1 - ctrue);
4255 return MIN (lim, 1 + ctrue + cfalse);
4256 }
4257
4258 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
4259 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
4260 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
4261 expression, and ARG to `a'. If COND_FIRST_P is non-zero, then the
4262 COND is the first argument to CODE; otherwise (as in the example
4263 given here), it is the second argument. TYPE is the type of the
4264 original expression. */
4265
4266 static tree
4267 fold_binary_op_with_conditional_arg (code, type, cond, arg, cond_first_p)
4268 enum tree_code code;
4269 tree type;
4270 tree cond;
4271 tree arg;
4272 int cond_first_p;
4273 {
4274 tree test, true_value, false_value;
4275 tree lhs = NULL_TREE;
4276 tree rhs = NULL_TREE;
4277 /* In the end, we'll produce a COND_EXPR. Both arms of the
4278 conditional expression will be binary operations. The left-hand
4279 side of the expression to be executed if the condition is true
4280 will be pointed to by TRUE_LHS. Similarly, the right-hand side
4281 of the expression to be executed if the condition is true will be
4282 pointed to by TRUE_RHS. FALSE_LHS and FALSE_RHS are analogous --
4283 but apply to the expression to be executed if the conditional is
4284 false. */
4285 tree *true_lhs;
4286 tree *true_rhs;
4287 tree *false_lhs;
4288 tree *false_rhs;
4289 /* These are the codes to use for the left-hand side and right-hand
4290 side of the COND_EXPR. Normally, they are the same as CODE. */
4291 enum tree_code lhs_code = code;
4292 enum tree_code rhs_code = code;
4293 /* And these are the types of the expressions. */
4294 tree lhs_type = type;
4295 tree rhs_type = type;
4296
4297 if (cond_first_p)
4298 {
4299 true_rhs = false_rhs = &arg;
4300 true_lhs = &true_value;
4301 false_lhs = &false_value;
4302 }
4303 else
4304 {
4305 true_lhs = false_lhs = &arg;
4306 true_rhs = &true_value;
4307 false_rhs = &false_value;
4308 }
4309
4310 if (TREE_CODE (cond) == COND_EXPR)
4311 {
4312 test = TREE_OPERAND (cond, 0);
4313 true_value = TREE_OPERAND (cond, 1);
4314 false_value = TREE_OPERAND (cond, 2);
4315 /* If this operand is a void-typed expression, such as a throw,
4316 it does not make sense to try to perform a logical or arithmetic
4317 operation involving it. Instead of building `a + throw 3' for
4318 example, we simply build `a, throw 3'. */
4319 if (VOID_TYPE_P (TREE_TYPE (true_value)))
4320 {
4321 lhs_code = COMPOUND_EXPR;
4322 if (!cond_first_p)
4323 lhs_type = void_type_node;
4324 }
4325 if (VOID_TYPE_P (TREE_TYPE (false_value)))
4326 {
4327 rhs_code = COMPOUND_EXPR;
4328 if (!cond_first_p)
4329 rhs_type = void_type_node;
4330 }
4331 }
4332 else
4333 {
4334 tree testtype = TREE_TYPE (cond);
4335 test = cond;
4336 true_value = convert (testtype, integer_one_node);
4337 false_value = convert (testtype, integer_zero_node);
4338 }
4339
4340 /* If ARG is complex we want to make sure we only evaluate
4341 it once. Though this is only required if it is volatile, it
4342 might be more efficient even if it is not. However, if we
4343 succeed in folding one part to a constant, we do not need
4344 to make this SAVE_EXPR. Since we do this optimization
4345 primarily to see if we do end up with a constant and this
4346 SAVE_EXPR interferes with later optimizations, suppressing
4347 it when we can is important.
4348
4349 If we are not in a function, we can't make a SAVE_EXPR, so don't
4350 try to do so. Don't try to see if the result is a constant
4351 if an arm is a COND_EXPR since we get exponential behavior
4352 in that case. */
4353
4354 if (TREE_CODE (arg) != SAVE_EXPR && ! TREE_CONSTANT (arg)
4355 && (*lang_hooks.decls.global_bindings_p) () == 0
4356 && ((TREE_CODE (arg) != VAR_DECL
4357 && TREE_CODE (arg) != PARM_DECL)
4358 || TREE_SIDE_EFFECTS (arg)))
4359 {
4360 if (TREE_CODE (true_value) != COND_EXPR)
4361 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4362
4363 if (TREE_CODE (false_value) != COND_EXPR)
4364 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4365
4366 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4367 && (rhs == 0 || !TREE_CONSTANT (rhs)))
4368 arg = save_expr (arg), lhs = rhs = 0;
4369 }
4370
4371 if (lhs == 0)
4372 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4373 if (rhs == 0)
4374 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4375
4376 test = fold (build (COND_EXPR, type, test, lhs, rhs));
4377
4378 if (TREE_CODE (arg) == SAVE_EXPR)
4379 return build (COMPOUND_EXPR, type,
4380 convert (void_type_node, arg),
4381 strip_compound_expr (test, arg));
4382 else
4383 return convert (type, test);
4384 }
4385
4386 \f
4387 /* Subroutine of fold() that checks for the addition of +/- 0.0.
4388
4389 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
4390 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
4391 ADDEND is the same as X.
4392
4393 X + 0 and X - 0 both give X when X is NaN, infinite, or non-zero
4394 and finite. The problematic cases are when X is zero, and its mode
4395 has signed zeros. In the case of rounding towards -infinity,
4396 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
4397 modes, X + 0 is not the same as X because -0 + 0 is 0. */
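/* Concrete illustration: with X == -0.0 in the default rounding mode,
X + 0.0 yields +0.0, not X; with X == +0.0 when rounding towards
-infinity, X - 0.0 yields -0.0, not X. */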
4398
4399 static bool
4400 fold_real_zero_addition_p (type, addend, negate)
4401 tree type, addend;
4402 int negate;
4403 {
4404 if (!real_zerop (addend))
4405 return false;
4406
4407 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
4408 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
4409 return true;
4410
4411 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
4412 if (TREE_CODE (addend) == REAL_CST
4413 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
4414 negate = !negate;
4415
4416 /* The mode has signed zeros, and we have to honor their sign.
4417 In this situation, there is only one case we can return true for.
4418 X - 0 is the same as X unless rounding towards -infinity is
4419 supported. */
4420 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
4421 }
4422
4423
4424 /* Perform constant folding and related simplification of EXPR.
4425 The related simplifications include x*1 => x, x*0 => 0, etc.,
4426 and application of the associative law.
4427 NOP_EXPR conversions may be removed freely (as long as we
4428 are careful not to change the C type of the overall expression).
4429 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
4430 but we can constant-fold them if they have constant operands. */
4431
4432 tree
4433 fold (expr)
4434 tree expr;
4435 {
4436 tree t = expr;
4437 tree t1 = NULL_TREE;
4438 tree tem;
4439 tree type = TREE_TYPE (expr);
4440 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4441 enum tree_code code = TREE_CODE (t);
4442 int kind = TREE_CODE_CLASS (code);
4443 int invert;
4444 /* WINS will be nonzero when the switch is done
4445 if all operands are constant. */
4446 int wins = 1;
4447
4448 /* Don't try to process an RTL_EXPR since its operands aren't trees.
4449 Likewise for a SAVE_EXPR that's already been evaluated. */
4450 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
4451 return t;
4452
4453 /* Return right away if a constant. */
4454 if (kind == 'c')
4455 return t;
4456
4457 #ifdef MAX_INTEGER_COMPUTATION_MODE
4458 check_max_integer_computation_mode (expr);
4459 #endif
4460
4461 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
4462 {
4463 tree subop;
4464
4465 /* Special case for conversion ops that can have fixed point args. */
4466 arg0 = TREE_OPERAND (t, 0);
4467
4468 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
4469 if (arg0 != 0)
4470 STRIP_SIGN_NOPS (arg0);
4471
4472 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
4473 subop = TREE_REALPART (arg0);
4474 else
4475 subop = arg0;
4476
4477 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
4478 && TREE_CODE (subop) != REAL_CST
4479 )
4480 /* Note that TREE_CONSTANT isn't enough:
4481 static var addresses are constant but we can't
4482 do arithmetic on them. */
4483 wins = 0;
4484 }
4485 else if (IS_EXPR_CODE_CLASS (kind) || kind == 'r')
4486 {
4487 int len = first_rtl_op (code);
4488 int i;
4489 for (i = 0; i < len; i++)
4490 {
4491 tree op = TREE_OPERAND (t, i);
4492 tree subop;
4493
4494 if (op == 0)
4495 continue; /* Valid for CALL_EXPR, at least. */
4496
4497 if (kind == '<' || code == RSHIFT_EXPR)
4498 {
4499 /* Signedness matters here. Perhaps we can refine this
4500 later. */
4501 STRIP_SIGN_NOPS (op);
4502 }
4503 else
4504 /* Strip any conversions that don't change the mode. */
4505 STRIP_NOPS (op);
4506
4507 if (TREE_CODE (op) == COMPLEX_CST)
4508 subop = TREE_REALPART (op);
4509 else
4510 subop = op;
4511
4512 if (TREE_CODE (subop) != INTEGER_CST
4513 && TREE_CODE (subop) != REAL_CST)
4514 /* Note that TREE_CONSTANT isn't enough:
4515 static var addresses are constant but we can't
4516 do arithmetic on them. */
4517 wins = 0;
4518
4519 if (i == 0)
4520 arg0 = op;
4521 else if (i == 1)
4522 arg1 = op;
4523 }
4524 }
4525
4526 /* If this is a commutative operation, and ARG0 is a constant, move it
4527 to ARG1 to reduce the number of tests below. */
4528 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
4529 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
4530 || code == BIT_AND_EXPR)
4531 && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
4532 {
4533 tem = arg0; arg0 = arg1; arg1 = tem;
4534
4535 tem = TREE_OPERAND (t, 0); TREE_OPERAND (t, 0) = TREE_OPERAND (t, 1);
4536 TREE_OPERAND (t, 1) = tem;
4537 }
4538
4539 /* Now WINS is set as described above,
4540 ARG0 is the first operand of EXPR,
4541 and ARG1 is the second operand (if it has more than one operand).
4542
4543 First check for cases where an arithmetic operation is applied to a
4544 compound, conditional, or comparison operation. Push the arithmetic
4545 operation inside the compound or conditional to see if any folding
4546 can then be done. Convert comparison to conditional for this purpose.
4547 This also optimizes non-constant cases that used to be done in
4548 expand_expr.
4549
4550 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
4551 one of the operands is a comparison and the other is a comparison, a
4552 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
4553 code below would make the expression more complex. Change it to a
4554 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
4555 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
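/* E.g. (illustrative operands): `(a < b) & (c < d)' becomes a
TRUTH_AND_EXPR of the two comparisons, and `(a < b) != (c < d)'
becomes a TRUTH_XOR_EXPR. */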
4556
4557 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
4558 || code == EQ_EXPR || code == NE_EXPR)
4559 && ((truth_value_p (TREE_CODE (arg0))
4560 && (truth_value_p (TREE_CODE (arg1))
4561 || (TREE_CODE (arg1) == BIT_AND_EXPR
4562 && integer_onep (TREE_OPERAND (arg1, 1)))))
4563 || (truth_value_p (TREE_CODE (arg1))
4564 && (truth_value_p (TREE_CODE (arg0))
4565 || (TREE_CODE (arg0) == BIT_AND_EXPR
4566 && integer_onep (TREE_OPERAND (arg0, 1)))))))
4567 {
4568 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
4569 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
4570 : TRUTH_XOR_EXPR,
4571 type, arg0, arg1));
4572
4573 if (code == EQ_EXPR)
4574 t = invert_truthvalue (t);
4575
4576 return t;
4577 }
4578
4579 if (TREE_CODE_CLASS (code) == '1')
4580 {
4581 if (TREE_CODE (arg0) == COMPOUND_EXPR)
4582 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4583 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
4584 else if (TREE_CODE (arg0) == COND_EXPR)
4585 {
4586 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
4587 fold (build1 (code, type, TREE_OPERAND (arg0, 1))),
4588 fold (build1 (code, type, TREE_OPERAND (arg0, 2)))));
4589
4590 /* If this was a conversion, and all we did was to move it
4591 inside the COND_EXPR, bring it back out. But leave it if
4592 it is a conversion from integer to integer and the
4593 result precision is no wider than a word since such a
4594 conversion is cheap and may be optimized away by combine,
4595 while it couldn't if it were outside the COND_EXPR. Then return
4596 so we don't get into an infinite recursion loop taking the
4597 conversion out and then back in. */
4598
4599 if ((code == NOP_EXPR || code == CONVERT_EXPR
4600 || code == NON_LVALUE_EXPR)
4601 && TREE_CODE (t) == COND_EXPR
4602 && TREE_CODE (TREE_OPERAND (t, 1)) == code
4603 && TREE_CODE (TREE_OPERAND (t, 2)) == code
4604 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
4605 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
4606 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
4607 && (INTEGRAL_TYPE_P
4608 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))))
4609 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
4610 t = build1 (code, type,
4611 build (COND_EXPR,
4612 TREE_TYPE (TREE_OPERAND
4613 (TREE_OPERAND (t, 1), 0)),
4614 TREE_OPERAND (t, 0),
4615 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
4616 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
4617 return t;
4618 }
4619 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
4620 return fold (build (COND_EXPR, type, arg0,
4621 fold (build1 (code, type, integer_one_node)),
4622 fold (build1 (code, type, integer_zero_node))));
4623 }
4624 else if (TREE_CODE_CLASS (code) == '2'
4625 || TREE_CODE_CLASS (code) == '<')
4626 {
4627 if (TREE_CODE (arg1) == COMPOUND_EXPR)
4628 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
4629 fold (build (code, type,
4630 arg0, TREE_OPERAND (arg1, 1))));
4631 else if ((TREE_CODE (arg1) == COND_EXPR
4632 || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
4633 && TREE_CODE_CLASS (code) != '<'))
4634 && (TREE_CODE (arg0) != COND_EXPR
4635 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
4636 && (! TREE_SIDE_EFFECTS (arg0)
4637 || ((*lang_hooks.decls.global_bindings_p) () == 0
4638 && ! contains_placeholder_p (arg0))))
4639 return
4640 fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
4641 /*cond_first_p=*/0);
4642 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
4643 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4644 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
4645 else if ((TREE_CODE (arg0) == COND_EXPR
4646 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
4647 && TREE_CODE_CLASS (code) != '<'))
4648 && (TREE_CODE (arg1) != COND_EXPR
4649 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
4650 && (! TREE_SIDE_EFFECTS (arg1)
4651 || ((*lang_hooks.decls.global_bindings_p) () == 0
4652 && ! contains_placeholder_p (arg1))))
4653 return
4654 fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
4655 /*cond_first_p=*/1);
4656 }
4657 else if (TREE_CODE_CLASS (code) == '<'
4658 && TREE_CODE (arg0) == COMPOUND_EXPR)
4659 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4660 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
4661 else if (TREE_CODE_CLASS (code) == '<'
4662 && TREE_CODE (arg1) == COMPOUND_EXPR)
4663 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
4664 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
4665
4666 switch (code)
4667 {
4668 case INTEGER_CST:
4669 case REAL_CST:
4670 case VECTOR_CST:
4671 case STRING_CST:
4672 case COMPLEX_CST:
4673 case CONSTRUCTOR:
4674 return t;
4675
4676 case CONST_DECL:
4677 return fold (DECL_INITIAL (t));
4678
4679 case NOP_EXPR:
4680 case FLOAT_EXPR:
4681 case CONVERT_EXPR:
4682 case FIX_TRUNC_EXPR:
4683 /* Other kinds of FIX are not handled properly by fold_convert. */
4684
4685 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
4686 return TREE_OPERAND (t, 0);
4687
4688 /* Handle cases of two conversions in a row. */
4689 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
4690 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
4691 {
4692 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4693 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
4694 tree final_type = TREE_TYPE (t);
4695 int inside_int = INTEGRAL_TYPE_P (inside_type);
4696 int inside_ptr = POINTER_TYPE_P (inside_type);
4697 int inside_float = FLOAT_TYPE_P (inside_type);
4698 unsigned int inside_prec = TYPE_PRECISION (inside_type);
4699 int inside_unsignedp = TREE_UNSIGNED (inside_type);
4700 int inter_int = INTEGRAL_TYPE_P (inter_type);
4701 int inter_ptr = POINTER_TYPE_P (inter_type);
4702 int inter_float = FLOAT_TYPE_P (inter_type);
4703 unsigned int inter_prec = TYPE_PRECISION (inter_type);
4704 int inter_unsignedp = TREE_UNSIGNED (inter_type);
4705 int final_int = INTEGRAL_TYPE_P (final_type);
4706 int final_ptr = POINTER_TYPE_P (final_type);
4707 int final_float = FLOAT_TYPE_P (final_type);
4708 unsigned int final_prec = TYPE_PRECISION (final_type);
4709 int final_unsignedp = TREE_UNSIGNED (final_type);
4710
4711 /* In addition to the cases of two conversions in a row
4712 handled below, if we are converting something to its own
4713 type via an object of identical or wider precision, neither
4714 conversion is needed. */
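/* E.g. `(float) (double) f' with float f returns the value to its
own type via a wider one, so neither conversion is needed
(illustrative operand). */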
4715 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (final_type)
4716 && ((inter_int && final_int) || (inter_float && final_float))
4717 && inter_prec >= final_prec)
4718 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4719
4720 /* Likewise, if the intermediate and final types are either both
4721 float or both integer, we don't need the middle conversion if
4722 it is at least as wide as the initial type and doesn't change the signedness
4723 (for integers). Avoid this if the final type is a pointer
4724 since then we sometimes need the inner conversion. Likewise if
4725 the outer has a precision not equal to the size of its mode. */
4726 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
4727 || (inter_float && inside_float))
4728 && inter_prec >= inside_prec
4729 && (inter_float || inter_unsignedp == inside_unsignedp)
4730 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
4731 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
4732 && ! final_ptr)
4733 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4734
4735 /* If we have a sign-extension of a zero-extended value, we can
4736 replace that by a single zero-extension. */
4737 if (inside_int && inter_int && final_int
4738 && inside_prec < inter_prec && inter_prec < final_prec
4739 && inside_unsignedp && !inter_unsignedp)
4740 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4741
4742 /* Two conversions in a row are not needed unless:
4743 - some conversion is floating-point (overstrict for now), or
4744 - the intermediate type is narrower than both initial and
4745 final, or
4746 - the intermediate type and innermost type differ in signedness,
4747 and the outermost type is wider than the intermediate, or
4748 - the initial type is a pointer type and the precisions of the
4749 intermediate and final types differ, or
4750 - the final type is a pointer type and the precisions of the
4751 initial and intermediate types differ. */
4752 if (! inside_float && ! inter_float && ! final_float
4753 && (inter_prec > inside_prec || inter_prec > final_prec)
4754 && ! (inside_int && inter_int
4755 && inter_unsignedp != inside_unsignedp
4756 && inter_prec < final_prec)
4757 && ((inter_unsignedp && inter_prec > inside_prec)
4758 == (final_unsignedp && final_prec > inter_prec))
4759 && ! (inside_ptr && inter_prec != final_prec)
4760 && ! (final_ptr && inside_prec != inter_prec)
4761 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
4762 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
4763 && ! final_ptr)
4764 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4765 }
4766
4767 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
4768 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
4769 /* Detect assigning a bitfield. */
4770 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
4771 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
4772 {
4773 /* Don't leave an assignment inside a conversion
4774 unless assigning a bitfield. */
4775 tree prev = TREE_OPERAND (t, 0);
4776 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
4777 /* First do the assignment, then return converted constant. */
4778 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
4779 TREE_USED (t) = 1;
4780 return t;
4781 }
4782
4783 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
4784 constant (if x has signed type, the sign bit cannot be set
4785 in c). This folds extension into the BIT_AND_EXPR. */
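/* Illustrative (hypothetical operands): with int x,
`(long long) (x & 0x7f)' folds to `(long long) x & 0x7f', since the
sign bit of x's type is clear in the mask. */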
4786 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
4787 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
4788 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
4789 {
4790 tree and = TREE_OPERAND (t, 0);
4791 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
4792 int change = 0;
4793
4794 if (TREE_UNSIGNED (TREE_TYPE (and))
4795 || (TYPE_PRECISION (TREE_TYPE (t))
4796 <= TYPE_PRECISION (TREE_TYPE (and))))
4797 change = 1;
4798 else if (TYPE_PRECISION (TREE_TYPE (and1))
4799 <= HOST_BITS_PER_WIDE_INT
4800 && host_integerp (and1, 1))
4801 {
4802 unsigned HOST_WIDE_INT cst;
4803
4804 cst = tree_low_cst (and1, 1);
4805 cst &= (HOST_WIDE_INT) -1
4806 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
4807 change = (cst == 0);
4808 #ifdef LOAD_EXTEND_OP
4809 if (change
4810 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
4811 == ZERO_EXTEND))
4812 {
4813 tree uns = (*lang_hooks.types.unsigned_type) (TREE_TYPE (and0));
4814 and0 = convert (uns, and0);
4815 and1 = convert (uns, and1);
4816 }
4817 #endif
4818 }
4819 if (change)
4820 return fold (build (BIT_AND_EXPR, TREE_TYPE (t),
4821 convert (TREE_TYPE (t), and0),
4822 convert (TREE_TYPE (t), and1)));
4823 }
4824
4825 if (!wins)
4826 {
4827 TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
4828 return t;
4829 }
4830 return fold_convert (t, arg0);
4831
4832 case VIEW_CONVERT_EXPR:
4833 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
4834 return build1 (VIEW_CONVERT_EXPR, type,
4835 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4836 return t;
4837
4838 case COMPONENT_REF:
4839 if (TREE_CODE (arg0) == CONSTRUCTOR)
4840 {
4841 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
4842 if (m)
4843 t = TREE_VALUE (m);
4844 }
4845 return t;
4846
4847 case RANGE_EXPR:
4848 TREE_CONSTANT (t) = wins;
4849 return t;
4850
4851 case NEGATE_EXPR:
4852 if (wins)
4853 {
4854 if (TREE_CODE (arg0) == INTEGER_CST)
4855 {
4856 unsigned HOST_WIDE_INT low;
4857 HOST_WIDE_INT high;
4858 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
4859 TREE_INT_CST_HIGH (arg0),
4860 &low, &high);
4861 t = build_int_2 (low, high);
4862 TREE_TYPE (t) = type;
4863 TREE_OVERFLOW (t)
4864 = (TREE_OVERFLOW (arg0)
4865 | force_fit_type (t, overflow && !TREE_UNSIGNED (type)));
4866 TREE_CONSTANT_OVERFLOW (t)
4867 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
4868 }
4869 else if (TREE_CODE (arg0) == REAL_CST)
4870 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
4871 }
4872 else if (TREE_CODE (arg0) == NEGATE_EXPR)
4873 return TREE_OPERAND (arg0, 0);
4874
4875 /* Convert - (a - b) to (b - a) for non-floating-point. */
4876 else if (TREE_CODE (arg0) == MINUS_EXPR
4877 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
4878 return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
4879 TREE_OPERAND (arg0, 0));
4880
4881 return t;
4882
4883 case ABS_EXPR:
4884 if (wins)
4885 {
4886 if (TREE_CODE (arg0) == INTEGER_CST)
4887 {
4888 /* If the value is unsigned, then the absolute value is
4889 the same as the ordinary value. */
4890 if (TREE_UNSIGNED (type))
4891 return arg0;
4892 /* Similarly, if the value is non-negative. */
4893 else if (INT_CST_LT (integer_minus_one_node, arg0))
4894 return arg0;
4895 /* If the value is negative, then the absolute value is
4896 its negation. */
4897 else
4898 {
4899 unsigned HOST_WIDE_INT low;
4900 HOST_WIDE_INT high;
4901 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
4902 TREE_INT_CST_HIGH (arg0),
4903 &low, &high);
4904 t = build_int_2 (low, high);
4905 TREE_TYPE (t) = type;
4906 TREE_OVERFLOW (t)
4907 = (TREE_OVERFLOW (arg0)
4908 | force_fit_type (t, overflow));
4909 TREE_CONSTANT_OVERFLOW (t)
4910 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
4911 }
4912 }
4913 else if (TREE_CODE (arg0) == REAL_CST)
4914 {
4915 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
4916 t = build_real (type,
4917 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
4918 }
4919 }
4920 else if (TREE_CODE (arg0) == ABS_EXPR || TREE_CODE (arg0) == NEGATE_EXPR)
4921 return build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
4922 return t;
4923
4924 case CONJ_EXPR:
4925 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
4926 return convert (type, arg0);
4927 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
4928 return build (COMPLEX_EXPR, type,
4929 TREE_OPERAND (arg0, 0),
4930 negate_expr (TREE_OPERAND (arg0, 1)));
4931 else if (TREE_CODE (arg0) == COMPLEX_CST)
4932 return build_complex (type, TREE_REALPART (arg0),
4933 negate_expr (TREE_IMAGPART (arg0)));
4934 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
4935 return fold (build (TREE_CODE (arg0), type,
4936 fold (build1 (CONJ_EXPR, type,
4937 TREE_OPERAND (arg0, 0))),
4938 fold (build1 (CONJ_EXPR,
4939 type, TREE_OPERAND (arg0, 1)))));
4940 else if (TREE_CODE (arg0) == CONJ_EXPR)
4941 return TREE_OPERAND (arg0, 0);
4942 return t;
4943
4944 case BIT_NOT_EXPR:
4945 if (wins)
4946 {
4947 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
4948 ~ TREE_INT_CST_HIGH (arg0));
4949 TREE_TYPE (t) = type;
4950 force_fit_type (t, 0);
4951 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
4952 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
4953 }
4954 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
4955 return TREE_OPERAND (arg0, 0);
4956 return t;
4957
4958 case PLUS_EXPR:
4959 /* A + (-B) -> A - B */
4960 if (TREE_CODE (arg1) == NEGATE_EXPR)
4961 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
4962 /* (-A) + B -> B - A */
4963 if (TREE_CODE (arg0) == NEGATE_EXPR)
4964 return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
4965 else if (! FLOAT_TYPE_P (type))
4966 {
4967 if (integer_zerop (arg1))
4968 return non_lvalue (convert (type, arg0));
4969
4970 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
4971 with a constant, and the two constants have no bits in common,
4972 we should treat this as a BIT_IOR_EXPR since this may produce more
4973 simplifications. */
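/* E.g. (hypothetical constants): in `(x & 0xf0) + (y & 0x0f)' the
mask constants share no bits, so the sum is handled as
`(x & 0xf0) | (y & 0x0f)'. */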
4974 if (TREE_CODE (arg0) == BIT_AND_EXPR
4975 && TREE_CODE (arg1) == BIT_AND_EXPR
4976 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
4977 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
4978 && integer_zerop (const_binop (BIT_AND_EXPR,
4979 TREE_OPERAND (arg0, 1),
4980 TREE_OPERAND (arg1, 1), 0)))
4981 {
4982 code = BIT_IOR_EXPR;
4983 goto bit_ior;
4984 }
4985
4986 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
4987 (plus (plus (mult) (mult)) (foo)) so that we can
4988 take advantage of the factoring cases below. */
4989 if ((TREE_CODE (arg0) == PLUS_EXPR
4990 && TREE_CODE (arg1) == MULT_EXPR)
4991 || (TREE_CODE (arg1) == PLUS_EXPR
4992 && TREE_CODE (arg0) == MULT_EXPR))
4993 {
4994 tree parg0, parg1, parg, marg;
4995
4996 if (TREE_CODE (arg0) == PLUS_EXPR)
4997 parg = arg0, marg = arg1;
4998 else
4999 parg = arg1, marg = arg0;
5000 parg0 = TREE_OPERAND (parg, 0);
5001 parg1 = TREE_OPERAND (parg, 1);
5002 STRIP_NOPS (parg0);
5003 STRIP_NOPS (parg1);
5004
5005 if (TREE_CODE (parg0) == MULT_EXPR
5006 && TREE_CODE (parg1) != MULT_EXPR)
5007 return fold (build (PLUS_EXPR, type,
5008 fold (build (PLUS_EXPR, type, parg0, marg)),
5009 parg1));
5010 if (TREE_CODE (parg0) != MULT_EXPR
5011 && TREE_CODE (parg1) == MULT_EXPR)
5012 return fold (build (PLUS_EXPR, type,
5013 fold (build (PLUS_EXPR, type, parg1, marg)),
5014 parg0));
5015 }
5016
5017 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
5018 {
5019 tree arg00, arg01, arg10, arg11;
5020 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
5021
5022 /* (A * C) + (B * C) -> (A+B) * C.
5023 We are most concerned about the case where C is a constant,
5024 but other combinations show up during loop reduction. Since
5025 it is not difficult, try all four possibilities. */
5026
5027 arg00 = TREE_OPERAND (arg0, 0);
5028 arg01 = TREE_OPERAND (arg0, 1);
5029 arg10 = TREE_OPERAND (arg1, 0);
5030 arg11 = TREE_OPERAND (arg1, 1);
5031 same = NULL_TREE;
5032
5033 if (operand_equal_p (arg01, arg11, 0))
5034 same = arg01, alt0 = arg00, alt1 = arg10;
5035 else if (operand_equal_p (arg00, arg10, 0))
5036 same = arg00, alt0 = arg01, alt1 = arg11;
5037 else if (operand_equal_p (arg00, arg11, 0))
5038 same = arg00, alt0 = arg01, alt1 = arg10;
5039 else if (operand_equal_p (arg01, arg10, 0))
5040 same = arg01, alt0 = arg00, alt1 = arg11;
5041
5042 /* No identical multiplicands; see if we can find a common
5043 power-of-two factor in non-power-of-two multiplies. This
5044 can help in multi-dimensional array access. */
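/* Illustrative: `i * 12 + j * 4' has the common power-of-two
factor 4 and is refactored as `(i * 3 + j) * 4'. */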
5045 else if (TREE_CODE (arg01) == INTEGER_CST
5046 && TREE_CODE (arg11) == INTEGER_CST
5047 && TREE_INT_CST_HIGH (arg01) == 0
5048 && TREE_INT_CST_HIGH (arg11) == 0)
5049 {
5050 HOST_WIDE_INT int01, int11, tmp;
5051 int01 = TREE_INT_CST_LOW (arg01);
5052 int11 = TREE_INT_CST_LOW (arg11);
5053
5054 /* Move min of absolute values to int11. */
5055 if ((int01 >= 0 ? int01 : -int01)
5056 < (int11 >= 0 ? int11 : -int11))
5057 {
5058 tmp = int01, int01 = int11, int11 = tmp;
5059 alt0 = arg00, arg00 = arg10, arg10 = alt0;
5060 alt0 = arg01, arg01 = arg11, arg11 = alt0;
5061 }
5062
5063 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
5064 {
5065 alt0 = fold (build (MULT_EXPR, type, arg00,
5066 build_int_2 (int01 / int11, 0)));
5067 alt1 = arg10;
5068 same = arg11;
5069 }
5070 }
5071
5072 if (same)
5073 return fold (build (MULT_EXPR, type,
5074 fold (build (PLUS_EXPR, type, alt0, alt1)),
5075 same));
5076 }
5077 }
5078
5079 /* See if ARG1 is zero and X + ARG1 reduces to X. */
5080 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
5081 return non_lvalue (convert (type, arg0));
5082
5083 /* Likewise if the operands are reversed. */
5084 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
5085 return non_lvalue (convert (type, arg1));
5086
5087 bit_rotate:
5088 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
5089 is a rotate of A by C1 bits. */
5090 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
5091 is a rotate of A by B bits. */
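/* E.g. for a 32-bit unsigned A (an illustrative width),
`(A << 3) + (A >> 29)' is a left rotation of A by 3 bits. */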
5092 {
5093 enum tree_code code0, code1;
5094 code0 = TREE_CODE (arg0);
5095 code1 = TREE_CODE (arg1);
5096 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
5097 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
5098 && operand_equal_p (TREE_OPERAND (arg0, 0),
5099 TREE_OPERAND (arg1, 0), 0)
5100 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5101 {
5102 tree tree01, tree11;
5103 enum tree_code code01, code11;
5104
5105 tree01 = TREE_OPERAND (arg0, 1);
5106 tree11 = TREE_OPERAND (arg1, 1);
5107 STRIP_NOPS (tree01);
5108 STRIP_NOPS (tree11);
5109 code01 = TREE_CODE (tree01);
5110 code11 = TREE_CODE (tree11);
5111 if (code01 == INTEGER_CST
5112 && code11 == INTEGER_CST
5113 && TREE_INT_CST_HIGH (tree01) == 0
5114 && TREE_INT_CST_HIGH (tree11) == 0
5115 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
5116 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
5117 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
5118 code0 == LSHIFT_EXPR ? tree01 : tree11);
5119 else if (code11 == MINUS_EXPR)
5120 {
5121 tree tree110, tree111;
5122 tree110 = TREE_OPERAND (tree11, 0);
5123 tree111 = TREE_OPERAND (tree11, 1);
5124 STRIP_NOPS (tree110);
5125 STRIP_NOPS (tree111);
5126 if (TREE_CODE (tree110) == INTEGER_CST
5127 && 0 == compare_tree_int (tree110,
5128 TYPE_PRECISION
5129 (TREE_TYPE (TREE_OPERAND
5130 (arg0, 0))))
5131 && operand_equal_p (tree01, tree111, 0))
5132 return build ((code0 == LSHIFT_EXPR
5133 ? LROTATE_EXPR
5134 : RROTATE_EXPR),
5135 type, TREE_OPERAND (arg0, 0), tree01);
5136 }
5137 else if (code01 == MINUS_EXPR)
5138 {
5139 tree tree010, tree011;
5140 tree010 = TREE_OPERAND (tree01, 0);
5141 tree011 = TREE_OPERAND (tree01, 1);
5142 STRIP_NOPS (tree010);
5143 STRIP_NOPS (tree011);
5144 if (TREE_CODE (tree010) == INTEGER_CST
5145 && 0 == compare_tree_int (tree010,
5146 TYPE_PRECISION
5147 (TREE_TYPE (TREE_OPERAND
5148 (arg0, 0))))
5149 && operand_equal_p (tree11, tree011, 0))
5150 return build ((code0 != LSHIFT_EXPR
5151 ? LROTATE_EXPR
5152 : RROTATE_EXPR),
5153 type, TREE_OPERAND (arg0, 0), tree11);
5154 }
5155 }
5156 }
5157
5158 associate:
5159 /* In most languages, we can't associate operations on floats through
5160 parentheses. Rather than remember where the parentheses were, we
5161 don't associate floats at all. It shouldn't matter much. However,
5162 associating multiplications is only very slightly inaccurate, so do
5163 that if -funsafe-math-optimizations is specified. */
5164
5165 if (! wins
5166 && (! FLOAT_TYPE_P (type)
5167 || (flag_unsafe_math_optimizations && code == MULT_EXPR)))
5168 {
5169 tree var0, con0, lit0, minus_lit0;
5170 tree var1, con1, lit1, minus_lit1;
5171
5172 /* Split both trees into variables, constants, and literals. Then
5173 associate each group together, the constants with literals,
5174 then the result with variables. This increases the chances of
5175 literals being recombined later and of generating relocatable
5176 expressions for the sum of a constant and literal. */
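/* For example, in (x + 3) + (y + 7), each operand splits into a
   variable and a literal, and the pieces recombine as (x + y) + 10. */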
5177 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
5178 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
5179 code == MINUS_EXPR);
5180
5181 /* Only do something if we found more than two objects. Otherwise,
5182 nothing has changed and we risk infinite recursion. */
5183 if (2 < ((var0 != 0) + (var1 != 0)
5184 + (con0 != 0) + (con1 != 0)
5185 + (lit0 != 0) + (lit1 != 0)
5186 + (minus_lit0 != 0) + (minus_lit1 != 0)))
5187 {
5188 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
5189 if (code == MINUS_EXPR)
5190 code = PLUS_EXPR;
5191
5192 var0 = associate_trees (var0, var1, code, type);
5193 con0 = associate_trees (con0, con1, code, type);
5194 lit0 = associate_trees (lit0, lit1, code, type);
5195 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
5196
5197 /* Preserve the MINUS_EXPR if the negative part of the literal is
5198 greater than the positive part. Otherwise, the multiplicative
5199 folding code (i.e. extract_muldiv) may be fooled when
5200 unsigned constants are subtracted, as in the following
5201 example: ((X*2 + 4) - 8U)/2. */
5202 if (minus_lit0 && lit0)
5203 {
5204 if (tree_int_cst_lt (lit0, minus_lit0))
5205 {
5206 minus_lit0 = associate_trees (minus_lit0, lit0,
5207 MINUS_EXPR, type);
5208 lit0 = 0;
5209 }
5210 else
5211 {
5212 lit0 = associate_trees (lit0, minus_lit0,
5213 MINUS_EXPR, type);
5214 minus_lit0 = 0;
5215 }
5216 }
5217 if (minus_lit0)
5218 {
5219 if (con0 == 0)
5220 return convert (type, associate_trees (var0, minus_lit0,
5221 MINUS_EXPR, type));
5222 else
5223 {
5224 con0 = associate_trees (con0, minus_lit0,
5225 MINUS_EXPR, type);
5226 return convert (type, associate_trees (var0, con0,
5227 PLUS_EXPR, type));
5228 }
5229 }
5230
5231 con0 = associate_trees (con0, lit0, code, type);
5232 return convert (type, associate_trees (var0, con0, code, type));
5233 }
5234 }
5235
5236 binary:
5237 if (wins)
5238 t1 = const_binop (code, arg0, arg1, 0);
5239 if (t1 != NULL_TREE)
5240 {
5241 /* The return value should always have
5242 the same type as the original expression. */
5243 if (TREE_TYPE (t1) != TREE_TYPE (t))
5244 t1 = convert (TREE_TYPE (t), t1);
5245
5246 return t1;
5247 }
5248 return t;
5249
5250 case MINUS_EXPR:
5251 /* A - (-B) -> A + B */
5252 if (TREE_CODE (arg1) == NEGATE_EXPR)
5253 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5254 /* (-A) - CST -> (-CST) - A for floating point (what about ints?) */
5255 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == REAL_CST)
5256 return
5257 fold (build (MINUS_EXPR, type,
5258 build_real (TREE_TYPE (arg1),
5259 REAL_VALUE_NEGATE (TREE_REAL_CST (arg1))),
5260 TREE_OPERAND (arg0, 0)));
5261
5262 if (! FLOAT_TYPE_P (type))
5263 {
5264 if (! wins && integer_zerop (arg0))
5265 return negate_expr (convert (type, arg1));
5266 if (integer_zerop (arg1))
5267 return non_lvalue (convert (type, arg0));
5268
5269 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
5270 about the case where C is a constant, just try one of the
5271 four possibilities. */
5272
5273 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
5274 && operand_equal_p (TREE_OPERAND (arg0, 1),
5275 TREE_OPERAND (arg1, 1), 0))
5276 return fold (build (MULT_EXPR, type,
5277 fold (build (MINUS_EXPR, type,
5278 TREE_OPERAND (arg0, 0),
5279 TREE_OPERAND (arg1, 0))),
5280 TREE_OPERAND (arg0, 1)));
5281 }
5282
5283 /* See if ARG1 is zero and X - ARG1 reduces to X. */
5284 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
5285 return non_lvalue (convert (type, arg0));
5286
5287 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
5288 ARG0 is zero and X + ARG0 reduces to X, since that would mean
5289 (-ARG1 + ARG0) reduces to -ARG1. */
5290 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
5291 return negate_expr (convert (type, arg1));
5292
5293 /* Fold &x - &x. This can happen from &x.foo - &x.
5294 This is unsafe for certain floats even in non-IEEE formats.
5295 In IEEE, it is unsafe because it gives the wrong result for NaNs.
5296 Also note that operand_equal_p is always false if an operand
5297 is volatile. */
5298
5299 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
5300 && operand_equal_p (arg0, arg1, 0))
5301 return convert (type, integer_zero_node);
5302
5303 goto associate;
5304
5305 case MULT_EXPR:
5306 /* (-A) * (-B) -> A * B */
5307 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
5308 return fold (build (MULT_EXPR, type, TREE_OPERAND (arg0, 0),
5309 TREE_OPERAND (arg1, 0)));
5310
5311 if (! FLOAT_TYPE_P (type))
5312 {
5313 if (integer_zerop (arg1))
5314 return omit_one_operand (type, arg1, arg0);
5315 if (integer_onep (arg1))
5316 return non_lvalue (convert (type, arg0));
5317
5318 /* (a * (1 << b)) is (a << b) */
5319 if (TREE_CODE (arg1) == LSHIFT_EXPR
5320 && integer_onep (TREE_OPERAND (arg1, 0)))
5321 return fold (build (LSHIFT_EXPR, type, arg0,
5322 TREE_OPERAND (arg1, 1)));
5323 if (TREE_CODE (arg0) == LSHIFT_EXPR
5324 && integer_onep (TREE_OPERAND (arg0, 0)))
5325 return fold (build (LSHIFT_EXPR, type, arg1,
5326 TREE_OPERAND (arg0, 1)));
5327
5328 if (TREE_CODE (arg1) == INTEGER_CST
5329 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
5330 code, NULL_TREE)))
5331 return convert (type, tem);
5332
5333 }
5334 else
5335 {
5336 /* Maybe fold x * 0 to 0. The expressions aren't the same
5337 when x is NaN, since x * 0 is also NaN. Nor are they the
5338 same in modes with signed zeros, since multiplying a
5339 negative value by 0 gives -0, not +0. */
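/* For example, (-3.0) * 0.0 is -0.0 and NaN * 0.0 is NaN, so folding
   either product to +0.0 would be wrong when those cases are honored. */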
5340 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
5341 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
5342 && real_zerop (arg1))
5343 return omit_one_operand (type, arg1, arg0);
5344 /* In IEEE floating point, x*1 is not equivalent to x for signaling NaNs.
5345 However, ANSI says we can drop signals,
5346 so we can do this anyway. */
5347 if (real_onep (arg1))
5348 return non_lvalue (convert (type, arg0));
5349 /* x*2 is x+x */
5350 if (! wins && real_twop (arg1)
5351 && (*lang_hooks.decls.global_bindings_p) () == 0
5352 && ! contains_placeholder_p (arg0))
5353 {
5354 tree arg = save_expr (arg0);
5355 return build (PLUS_EXPR, type, arg, arg);
5356 }
5357 }
5358 goto associate;
5359
5360 case BIT_IOR_EXPR:
5361 bit_ior:
5362 if (integer_all_onesp (arg1))
5363 return omit_one_operand (type, arg1, arg0);
5364 if (integer_zerop (arg1))
5365 return non_lvalue (convert (type, arg0));
5366 t1 = distribute_bit_expr (code, type, arg0, arg1);
5367 if (t1 != NULL_TREE)
5368 return t1;
5369
5370 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
5371
5372 This results in more efficient code for machines without a NAND
5373 instruction. Combine will canonicalize to the first form
5374 which will allow use of NAND instructions provided by the
5375 backend if they exist. */
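/* This is one of De Morgan's laws: ~a | ~b is equivalent to ~(a & b). */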
5376 if (TREE_CODE (arg0) == BIT_NOT_EXPR
5377 && TREE_CODE (arg1) == BIT_NOT_EXPR)
5378 {
5379 return fold (build1 (BIT_NOT_EXPR, type,
5380 build (BIT_AND_EXPR, type,
5381 TREE_OPERAND (arg0, 0),
5382 TREE_OPERAND (arg1, 0))));
5383 }
5384
5385 /* See if this can be simplified into a rotate first. If that
5386 is unsuccessful continue in the association code. */
5387 goto bit_rotate;
5388
5389 case BIT_XOR_EXPR:
5390 if (integer_zerop (arg1))
5391 return non_lvalue (convert (type, arg0));
5392 if (integer_all_onesp (arg1))
5393 return fold (build1 (BIT_NOT_EXPR, type, arg0));
5394
5395 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
5396 with a constant, and the two constants have no bits in common,
5397 we should treat this as a BIT_IOR_EXPR since this may produce more
5398 simplifications. */
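/* For example, (a & 0x0f) ^ (b & 0xf0) never sets a bit in both
   operands, so it is equivalent to (a & 0x0f) | (b & 0xf0). */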
5399 if (TREE_CODE (arg0) == BIT_AND_EXPR
5400 && TREE_CODE (arg1) == BIT_AND_EXPR
5401 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5402 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5403 && integer_zerop (const_binop (BIT_AND_EXPR,
5404 TREE_OPERAND (arg0, 1),
5405 TREE_OPERAND (arg1, 1), 0)))
5406 {
5407 code = BIT_IOR_EXPR;
5408 goto bit_ior;
5409 }
5410
5411 /* See if this can be simplified into a rotate first. If that
5412 is unsuccessful continue in the association code. */
5413 goto bit_rotate;
5414
5415 case BIT_AND_EXPR:
5416 bit_and:
5417 if (integer_all_onesp (arg1))
5418 return non_lvalue (convert (type, arg0));
5419 if (integer_zerop (arg1))
5420 return omit_one_operand (type, arg1, arg0);
5421 t1 = distribute_bit_expr (code, type, arg0, arg1);
5422 if (t1 != NULL_TREE)
5423 return t1;
5424 /* Simplify ((int)c & 0xff) into (int)c, if c is unsigned char. */
5425 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
5426 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5427 {
5428 unsigned int prec
5429 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
5430
5431 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
5432 && (~TREE_INT_CST_LOW (arg1)
5433 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
5434 return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
5435 }
5436
5437 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
5438
5439 This results in more efficient code for machines without a NOR
5440 instruction. Combine will canonicalize to the first form
5441 which will allow use of NOR instructions provided by the
5442 backend if they exist. */
5443 if (TREE_CODE (arg0) == BIT_NOT_EXPR
5444 && TREE_CODE (arg1) == BIT_NOT_EXPR)
5445 {
5446 return fold (build1 (BIT_NOT_EXPR, type,
5447 build (BIT_IOR_EXPR, type,
5448 TREE_OPERAND (arg0, 0),
5449 TREE_OPERAND (arg1, 0))));
5450 }
5451
5452 goto associate;
5453
5454 case BIT_ANDTC_EXPR:
5455 if (integer_all_onesp (arg0))
5456 return non_lvalue (convert (type, arg1));
5457 if (integer_zerop (arg0))
5458 return omit_one_operand (type, arg0, arg1);
5459 if (TREE_CODE (arg1) == INTEGER_CST)
5460 {
5461 arg1 = fold (build1 (BIT_NOT_EXPR, type, arg1));
5462 code = BIT_AND_EXPR;
5463 goto bit_and;
5464 }
5465 goto binary;
5466
5467 case RDIV_EXPR:
5468 /* Don't touch a floating-point divide by zero unless the mode
5469 of the constant can represent infinity. */
5470 if (TREE_CODE (arg1) == REAL_CST
5471 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
5472 && real_zerop (arg1))
5473 return t;
5474
5475 /* (-A) / (-B) -> A / B */
5476 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
5477 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
5478 TREE_OPERAND (arg1, 0)));
5479
5480 /* In IEEE floating point, x/1 is not equivalent to x for signaling NaNs.
5481 However, ANSI says we can drop signals, so we can do this anyway. */
5482 if (real_onep (arg1))
5483 return non_lvalue (convert (type, arg0));
5484
5485 /* If ARG1 is a constant, we can convert this to a multiply by the
5486 reciprocal. This does not have the same rounding properties,
5487 so only do this if -funsafe-math-optimizations. We can actually
5488 always safely do it if ARG1 is a power of two, but it's hard to
5489 tell if it is or not in a portable manner. */
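/* For example, x / 4.0 becomes x * 0.25 whenever we optimize, since
   0.25 is exactly representable, while x / 3.0 becomes x * (1.0/3.0)
   only under -funsafe-math-optimizations. */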
5490 if (TREE_CODE (arg1) == REAL_CST)
5491 {
5492 if (flag_unsafe_math_optimizations
5493 && 0 != (tem = const_binop (code, build_real (type, dconst1),
5494 arg1, 0)))
5495 return fold (build (MULT_EXPR, type, arg0, tem));
5496 /* Find the reciprocal if optimizing and the result is exact. */
5497 else if (optimize)
5498 {
5499 REAL_VALUE_TYPE r;
5500 r = TREE_REAL_CST (arg1);
5501 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
5502 {
5503 tem = build_real (type, r);
5504 return fold (build (MULT_EXPR, type, arg0, tem));
5505 }
5506 }
5507 }
5508 /* Convert A/B/C to A/(B*C). */
5509 if (flag_unsafe_math_optimizations
5510 && TREE_CODE (arg0) == RDIV_EXPR)
5511 {
5512 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
5513 build (MULT_EXPR, type, TREE_OPERAND (arg0, 1),
5514 arg1)));
5515 }
5516 /* Convert A/(B/C) to (A/B)*C. */
5517 if (flag_unsafe_math_optimizations
5518 && TREE_CODE (arg1) == RDIV_EXPR)
5519 {
5520 return fold (build (MULT_EXPR, type,
5521 build (RDIV_EXPR, type, arg0,
5522 TREE_OPERAND (arg1, 0)),
5523 TREE_OPERAND (arg1, 1)));
5524 }
5525 goto binary;
5526
5527 case TRUNC_DIV_EXPR:
5528 case ROUND_DIV_EXPR:
5529 case FLOOR_DIV_EXPR:
5530 case CEIL_DIV_EXPR:
5531 case EXACT_DIV_EXPR:
5532 if (integer_onep (arg1))
5533 return non_lvalue (convert (type, arg0));
5534 if (integer_zerop (arg1))
5535 return t;
5536
5537 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
5538 operation, EXACT_DIV_EXPR.
5539
5540 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
5541 At one time the others generated faster code, but it's not clear whether
5542 they still do after the last round of changes to the DIV code in expmed.c. */
5543 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
5544 && multiple_of_p (type, arg0, arg1))
5545 return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));
5546
5547 if (TREE_CODE (arg1) == INTEGER_CST
5548 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
5549 code, NULL_TREE)))
5550 return convert (type, tem);
5551
5552 goto binary;
5553
5554 case CEIL_MOD_EXPR:
5555 case FLOOR_MOD_EXPR:
5556 case ROUND_MOD_EXPR:
5557 case TRUNC_MOD_EXPR:
5558 if (integer_onep (arg1))
5559 return omit_one_operand (type, integer_zero_node, arg0);
5560 if (integer_zerop (arg1))
5561 return t;
5562
5563 if (TREE_CODE (arg1) == INTEGER_CST
5564 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
5565 code, NULL_TREE)))
5566 return convert (type, tem);
5567
5568 goto binary;
5569
5570 case LSHIFT_EXPR:
5571 case RSHIFT_EXPR:
5572 case LROTATE_EXPR:
5573 case RROTATE_EXPR:
5574 if (integer_zerop (arg1))
5575 return non_lvalue (convert (type, arg0));
5576 /* Since a negative shift count is not well-defined,
5577 don't try to compute it in the compiler. */
5578 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
5579 return t;
5580 /* Rewrite an LROTATE_EXPR by a constant into an
5581 RROTATE_EXPR by a new constant. */
5582 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
5583 {
5584 TREE_SET_CODE (t, RROTATE_EXPR);
5585 code = RROTATE_EXPR;
5586 TREE_OPERAND (t, 1) = arg1
5587 = const_binop
5588 (MINUS_EXPR,
5589 convert (TREE_TYPE (arg1),
5590 build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0)),
5591 arg1, 0);
5592 if (tree_int_cst_sgn (arg1) < 0)
5593 return t;
5594 }
5595
5596 /* If we have a rotate of a bit operation with the rotate count and
5597 the second operand of the bit operation both constant,
5598 permute the two operations. */
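/* For example, with a 32-bit type, (a & 0xff00) rotated right by 8
   becomes (a rotated right by 8) & 0xff, where the rotated mask has
   been folded to a constant. */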
5599 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
5600 && (TREE_CODE (arg0) == BIT_AND_EXPR
5601 || TREE_CODE (arg0) == BIT_ANDTC_EXPR
5602 || TREE_CODE (arg0) == BIT_IOR_EXPR
5603 || TREE_CODE (arg0) == BIT_XOR_EXPR)
5604 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
5605 return fold (build (TREE_CODE (arg0), type,
5606 fold (build (code, type,
5607 TREE_OPERAND (arg0, 0), arg1)),
5608 fold (build (code, type,
5609 TREE_OPERAND (arg0, 1), arg1))));
5610
5611 /* Two consecutive rotates whose counts add up to the width of the
5612 mode cancel out and can be removed. */
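/* For example, rotating a 32-bit value right by 8 and then right by
   24 leaves the value unchanged. */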
5613 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
5614 && TREE_CODE (arg0) == RROTATE_EXPR
5615 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5616 && TREE_INT_CST_HIGH (arg1) == 0
5617 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
5618 && ((TREE_INT_CST_LOW (arg1)
5619 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
5620 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
5621 return TREE_OPERAND (arg0, 0);
5622
5623 goto binary;
5624
5625 case MIN_EXPR:
5626 if (operand_equal_p (arg0, arg1, 0))
5627 return omit_one_operand (type, arg0, arg1);
5628 if (INTEGRAL_TYPE_P (type)
5629 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
5630 return omit_one_operand (type, arg1, arg0);
5631 goto associate;
5632
5633 case MAX_EXPR:
5634 if (operand_equal_p (arg0, arg1, 0))
5635 return omit_one_operand (type, arg0, arg1);
5636 if (INTEGRAL_TYPE_P (type)
5637 && TYPE_MAX_VALUE (type)
5638 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
5639 return omit_one_operand (type, arg1, arg0);
5640 goto associate;
5641
5642 case TRUTH_NOT_EXPR:
5643 /* Note that the operand of this must be an int
5644 and its values must be 0 or 1.
5645 ("true" is a fixed value perhaps depending on the language,
5646 but we don't handle values other than 1 correctly yet.) */
5647 tem = invert_truthvalue (arg0);
5648 /* Avoid infinite recursion. */
5649 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
5650 return t;
5651 return convert (type, tem);
5652
5653 case TRUTH_ANDIF_EXPR:
5654 /* Note that the operands of this must be ints
5655 and their values must be 0 or 1.
5656 ("true" is a fixed value perhaps depending on the language.) */
5657 /* If first arg is constant zero, return it. */
5658 if (integer_zerop (arg0))
5659 return convert (type, arg0);
5660 case TRUTH_AND_EXPR:
5661 /* If either arg is constant true, drop it. */
5662 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
5663 return non_lvalue (convert (type, arg1));
5664 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
5665 /* Preserve sequence points. */
5666 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
5667 return non_lvalue (convert (type, arg0));
5668 /* If second arg is constant zero, result is zero, but first arg
5669 must be evaluated. */
5670 if (integer_zerop (arg1))
5671 return omit_one_operand (type, arg1, arg0);
5672 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
5673 case will be handled here. */
5674 if (integer_zerop (arg0))
5675 return omit_one_operand (type, arg0, arg1);
5676
5677 truth_andor:
5678 /* We only do these simplifications if we are optimizing. */
5679 if (!optimize)
5680 return t;
5681
5682 /* Check for things like (A || B) && (A || C). We can convert this
5683 to A || (B && C). Note that either operator can be any of the four
5684 truth and/or operations and the transformation will still be
5685 valid. Also note that we only care about order for the
5686 ANDIF and ORIF operators. If B contains side effects, this
5687 might change the truth-value of A. */
5688 if (TREE_CODE (arg0) == TREE_CODE (arg1)
5689 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
5690 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
5691 || TREE_CODE (arg0) == TRUTH_AND_EXPR
5692 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
5693 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
5694 {
5695 tree a00 = TREE_OPERAND (arg0, 0);
5696 tree a01 = TREE_OPERAND (arg0, 1);
5697 tree a10 = TREE_OPERAND (arg1, 0);
5698 tree a11 = TREE_OPERAND (arg1, 1);
5699 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
5700 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
5701 && (code == TRUTH_AND_EXPR
5702 || code == TRUTH_OR_EXPR));
5703
5704 if (operand_equal_p (a00, a10, 0))
5705 return fold (build (TREE_CODE (arg0), type, a00,
5706 fold (build (code, type, a01, a11))));
5707 else if (commutative && operand_equal_p (a00, a11, 0))
5708 return fold (build (TREE_CODE (arg0), type, a00,
5709 fold (build (code, type, a01, a10))));
5710 else if (commutative && operand_equal_p (a01, a10, 0))
5711 return fold (build (TREE_CODE (arg0), type, a01,
5712 fold (build (code, type, a00, a11))));
5713
5714 /* This case is tricky because we must either have commutative
5715 operators or else A10 must not have side-effects. */
5716
5717 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
5718 && operand_equal_p (a01, a11, 0))
5719 return fold (build (TREE_CODE (arg0), type,
5720 fold (build (code, type, a00, a10)),
5721 a01));
5722 }
5723
5724 /* See if we can build a range comparison. */
5725 if (0 != (tem = fold_range_test (t)))
5726 return tem;
5727
5728 /* Check for the possibility of merging component references. If our
5729 lhs is another similar operation, try to merge its rhs with our
5730 rhs. Then try to merge our lhs and rhs. */
5731 if (TREE_CODE (arg0) == code
5732 && 0 != (tem = fold_truthop (code, type,
5733 TREE_OPERAND (arg0, 1), arg1)))
5734 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
5735
5736 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
5737 return tem;
5738
5739 return t;
5740
5741 case TRUTH_ORIF_EXPR:
5742 /* Note that the operands of this must be ints
5743 and their values must be 0 or true.
5744 ("true" is a fixed value perhaps depending on the language.) */
5745 /* If first arg is constant true, return it. */
5746 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
5747 return convert (type, arg0);
5748 case TRUTH_OR_EXPR:
5749 /* If either arg is constant zero, drop it. */
5750 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
5751 return non_lvalue (convert (type, arg1));
5752 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
5753 /* Preserve sequence points. */
5754 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
5755 return non_lvalue (convert (type, arg0));
5756 /* If second arg is constant true, result is true, but we must
5757 evaluate first arg. */
5758 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
5759 return omit_one_operand (type, arg1, arg0);
5760 /* Likewise for first arg, but note this only occurs here for
5761 TRUTH_OR_EXPR. */
5762 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
5763 return omit_one_operand (type, arg0, arg1);
5764 goto truth_andor;
5765
5766 case TRUTH_XOR_EXPR:
5767 /* If either arg is constant zero, drop it. */
5768 if (integer_zerop (arg0))
5769 return non_lvalue (convert (type, arg1));
5770 if (integer_zerop (arg1))
5771 return non_lvalue (convert (type, arg0));
5772 /* If either arg is constant true, this is a logical inversion. */
5773 if (integer_onep (arg0))
5774 return non_lvalue (convert (type, invert_truthvalue (arg1)));
5775 if (integer_onep (arg1))
5776 return non_lvalue (convert (type, invert_truthvalue (arg0)));
5777 return t;
5778
5779 case EQ_EXPR:
5780 case NE_EXPR:
5781 case LT_EXPR:
5782 case GT_EXPR:
5783 case LE_EXPR:
5784 case GE_EXPR:
5785 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
5786 {
5787 /* (-a) CMP (-b) -> b CMP a */
5788 if (TREE_CODE (arg0) == NEGATE_EXPR
5789 && TREE_CODE (arg1) == NEGATE_EXPR)
5790 return fold (build (code, type, TREE_OPERAND (arg1, 0),
5791 TREE_OPERAND (arg0, 0)));
5792 /* (-a) CMP CST -> a swap(CMP) (-CST) */
5793 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == REAL_CST)
5794 return
5795 fold (build
5796 (swap_tree_comparison (code), type,
5797 TREE_OPERAND (arg0, 0),
5798 build_real (TREE_TYPE (arg1),
5799 REAL_VALUE_NEGATE (TREE_REAL_CST (arg1)))));
5800 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
5801 /* a CMP (-0) -> a CMP 0 */
5802 if (TREE_CODE (arg1) == REAL_CST
5803 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (arg1)))
5804 return fold (build (code, type, arg0,
5805 build_real (TREE_TYPE (arg1), dconst0)));
5806 }
5807
5808 /* If one arg is a constant integer, put it last. */
5809 if (TREE_CODE (arg0) == INTEGER_CST
5810 && TREE_CODE (arg1) != INTEGER_CST)
5811 {
5812 TREE_OPERAND (t, 0) = arg1;
5813 TREE_OPERAND (t, 1) = arg0;
5814 arg0 = TREE_OPERAND (t, 0);
5815 arg1 = TREE_OPERAND (t, 1);
5816 code = swap_tree_comparison (code);
5817 TREE_SET_CODE (t, code);
5818 }
5819
5820 /* Convert foo++ == CONST into ++foo == CONST + INCR.
5821 First, see if one arg is constant; find the constant arg
5822 and the other one. */
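/* For example, i++ == 5 becomes ++i == 6, which avoids keeping a
   copy of the old value of i. */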
5823 {
5824 tree constop = 0, varop = NULL_TREE;
5825 int constopnum = -1;
5826
5827 if (TREE_CONSTANT (arg1))
5828 constopnum = 1, constop = arg1, varop = arg0;
5829 if (TREE_CONSTANT (arg0))
5830 constopnum = 0, constop = arg0, varop = arg1;
5831
5832 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
5833 {
5834 /* This optimization is invalid for ordered comparisons
5835 if CONST+INCR overflows or if foo+incr might overflow.
5836 This optimization is invalid for floating point due to rounding.
5837 For pointer types we assume overflow doesn't happen. */
5838 if (POINTER_TYPE_P (TREE_TYPE (varop))
5839 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
5840 && (code == EQ_EXPR || code == NE_EXPR)))
5841 {
5842 tree newconst
5843 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
5844 constop, TREE_OPERAND (varop, 1)));
5845
5846 /* Do not overwrite the current varop to be a preincrement;
5847 create a new node so that we won't confuse our caller who
5848 might create trees and throw them away, reusing the
5849 arguments that they passed to build. This shows up in
5850 the THEN or ELSE parts of ?: being postincrements. */
5851 varop = build (PREINCREMENT_EXPR, TREE_TYPE (varop),
5852 TREE_OPERAND (varop, 0),
5853 TREE_OPERAND (varop, 1));
5854
5855 /* If VAROP is a reference to a bitfield, we must mask
5856 the constant by the width of the field. */
5857 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
5858 && DECL_BIT_FIELD (TREE_OPERAND
5859 (TREE_OPERAND (varop, 0), 1)))
5860 {
5861 int size
5862 = TREE_INT_CST_LOW (DECL_SIZE
5863 (TREE_OPERAND
5864 (TREE_OPERAND (varop, 0), 1)));
5865 tree mask, unsigned_type;
5866 unsigned int precision;
5867 tree folded_compare;
5868
5869 /* First check whether the comparison would come out
5870 always the same. If we don't do that we would
5871 change the meaning with the masking. */
5872 if (constopnum == 0)
5873 folded_compare = fold (build (code, type, constop,
5874 TREE_OPERAND (varop, 0)));
5875 else
5876 folded_compare = fold (build (code, type,
5877 TREE_OPERAND (varop, 0),
5878 constop));
5879 if (integer_zerop (folded_compare)
5880 || integer_onep (folded_compare))
5881 return omit_one_operand (type, folded_compare, varop);
5882
5883 unsigned_type = (*lang_hooks.types.type_for_size) (size, 1);
5884 precision = TYPE_PRECISION (unsigned_type);
5885 mask = build_int_2 (~0, ~0);
5886 TREE_TYPE (mask) = unsigned_type;
5887 force_fit_type (mask, 0);
5888 mask = const_binop (RSHIFT_EXPR, mask,
5889 size_int (precision - size), 0);
5890 newconst = fold (build (BIT_AND_EXPR,
5891 TREE_TYPE (varop), newconst,
5892 convert (TREE_TYPE (varop),
5893 mask)));
5894 }
5895
5896 t = build (code, type,
5897 (constopnum == 0) ? newconst : varop,
5898 (constopnum == 1) ? newconst : varop);
5899 return t;
5900 }
5901 }
5902 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
5903 {
5904 if (POINTER_TYPE_P (TREE_TYPE (varop))
5905 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
5906 && (code == EQ_EXPR || code == NE_EXPR)))
5907 {
5908 tree newconst
5909 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
5910 constop, TREE_OPERAND (varop, 1)));
5911
5912 /* Do not overwrite the current varop to be a predecrement;
5913 create a new node so that we won't confuse our caller who
5914 might create trees and throw them away, reusing the
5915 arguments that they passed to build. This shows up in
5916 the THEN or ELSE parts of ?: being postdecrements. */
5917 varop = build (PREDECREMENT_EXPR, TREE_TYPE (varop),
5918 TREE_OPERAND (varop, 0),
5919 TREE_OPERAND (varop, 1));
5920
5921 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
5922 && DECL_BIT_FIELD (TREE_OPERAND
5923 (TREE_OPERAND (varop, 0), 1)))
5924 {
5925 int size
5926 = TREE_INT_CST_LOW (DECL_SIZE
5927 (TREE_OPERAND
5928 (TREE_OPERAND (varop, 0), 1)));
5929 tree mask, unsigned_type;
5930 unsigned int precision;
5931 tree folded_compare;
5932
5933 if (constopnum == 0)
5934 folded_compare = fold (build (code, type, constop,
5935 TREE_OPERAND (varop, 0)));
5936 else
5937 folded_compare = fold (build (code, type,
5938 TREE_OPERAND (varop, 0),
5939 constop));
5940 if (integer_zerop (folded_compare)
5941 || integer_onep (folded_compare))
5942 return omit_one_operand (type, folded_compare, varop);
5943
5944 unsigned_type = (*lang_hooks.types.type_for_size) (size, 1);
5945 precision = TYPE_PRECISION (unsigned_type);
5946 mask = build_int_2 (~0, ~0);
5947 TREE_TYPE (mask) = unsigned_type;
5948 force_fit_type (mask, 0);
5949 mask = const_binop (RSHIFT_EXPR, mask,
5950 size_int (precision - size), 0);
5951 newconst = fold (build (BIT_AND_EXPR,
5952 TREE_TYPE (varop), newconst,
5953 convert (TREE_TYPE (varop),
5954 mask)));
5955 }
5956
5957 t = build (code, type,
5958 (constopnum == 0) ? newconst : varop,
5959 (constopnum == 1) ? newconst : varop);
5960 return t;
5961 }
5962 }
5963 }
5964
5965 /* Change X >= CST to X > (CST - 1) and X < CST to X <= (CST - 1) if CST is positive. */
5966 if (TREE_CODE (arg1) == INTEGER_CST
5967 && TREE_CODE (arg0) != INTEGER_CST
5968 && tree_int_cst_sgn (arg1) > 0)
5969 {
5970 switch (TREE_CODE (t))
5971 {
5972 case GE_EXPR:
5973 code = GT_EXPR;
5974 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
5975 t = build (code, type, TREE_OPERAND (t, 0), arg1);
5976 break;
5977
5978 case LT_EXPR:
5979 code = LE_EXPR;
5980 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
5981 t = build (code, type, TREE_OPERAND (t, 0), arg1);
5982 break;
5983
5984 default:
5985 break;
5986 }
5987 }
5988
5989 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
5990 a MINUS_EXPR of a constant, we can convert it into a comparison with
5991 a revised constant as long as no overflow occurs. */
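/* For example, x + 10 == 15 folds to x == 5, and x - 10 == 15 folds
   to x == 25. */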
5992 if ((code == EQ_EXPR || code == NE_EXPR)
5993 && TREE_CODE (arg1) == INTEGER_CST
5994 && (TREE_CODE (arg0) == PLUS_EXPR
5995 || TREE_CODE (arg0) == MINUS_EXPR)
5996 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5997 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
5998 ? MINUS_EXPR : PLUS_EXPR,
5999 arg1, TREE_OPERAND (arg0, 1), 0))
6000 && ! TREE_CONSTANT_OVERFLOW (tem))
6001 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6002
6003 /* Similarly for a NEGATE_EXPR. */
6004 else if ((code == EQ_EXPR || code == NE_EXPR)
6005 && TREE_CODE (arg0) == NEGATE_EXPR
6006 && TREE_CODE (arg1) == INTEGER_CST
6007 && 0 != (tem = negate_expr (arg1))
6008 && TREE_CODE (tem) == INTEGER_CST
6009 && ! TREE_CONSTANT_OVERFLOW (tem))
6010 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6011
6012 /* If we have X - Y == 0, we can convert that to X == Y and similarly
6013 for !=. Don't do this for ordered comparisons due to overflow. */
6014 else if ((code == NE_EXPR || code == EQ_EXPR)
6015 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
6016 return fold (build (code, type,
6017 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
6018
6019 /* If we are widening one operand of an integer comparison,
6020 see if the other operand is similarly being widened. Perhaps we
6021 can do the comparison in the narrower type. */
6022 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
6023 && TREE_CODE (arg0) == NOP_EXPR
6024 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
6025 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
6026 && (TREE_TYPE (t1) == TREE_TYPE (tem)
6027 || (TREE_CODE (t1) == INTEGER_CST
6028 && int_fits_type_p (t1, TREE_TYPE (tem)))))
6029 return fold (build (code, type, tem, convert (TREE_TYPE (tem), t1)));
6030
6031 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
6032 constant, we can simplify it. */
6033 else if (TREE_CODE (arg1) == INTEGER_CST
6034 && (TREE_CODE (arg0) == MIN_EXPR
6035 || TREE_CODE (arg0) == MAX_EXPR)
6036 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
6037 return optimize_minmax_comparison (t);
6038
6039 /* If we are comparing an ABS_EXPR with a constant, we can
6040 convert all the cases into explicit comparisons, but they may
6041 well not be faster than doing the ABS and one comparison.
6042 But ABS (X) <= C is a range comparison, which becomes a subtraction
6043 and a comparison, and is probably faster. */
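/* For example, ABS (x) <= 5 becomes x >= -5 && x <= 5. */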
6044 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6045 && TREE_CODE (arg0) == ABS_EXPR
6046 && ! TREE_SIDE_EFFECTS (arg0)
6047 && (0 != (tem = negate_expr (arg1)))
6048 && TREE_CODE (tem) == INTEGER_CST
6049 && ! TREE_CONSTANT_OVERFLOW (tem))
6050 return fold (build (TRUTH_ANDIF_EXPR, type,
6051 build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
6052 build (LE_EXPR, type,
6053 TREE_OPERAND (arg0, 0), arg1)));
6054
6055 /* If this is an EQ or NE comparison with zero and ARG0 is
6056 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
6057 two operations, but the latter can be done in one less insn
6058 on machines that have only two-operand insns or on which a
6059 constant cannot be the first operand. */
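/* For example, ((1 << n) & flags) != 0 becomes
   ((flags >> n) & 1) != 0. */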
6060 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
6061 && TREE_CODE (arg0) == BIT_AND_EXPR)
6062 {
6063 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
6064 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
6065 return
6066 fold (build (code, type,
6067 build (BIT_AND_EXPR, TREE_TYPE (arg0),
6068 build (RSHIFT_EXPR,
6069 TREE_TYPE (TREE_OPERAND (arg0, 0)),
6070 TREE_OPERAND (arg0, 1),
6071 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
6072 convert (TREE_TYPE (arg0),
6073 integer_one_node)),
6074 arg1));
6075 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
6076 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
6077 return
6078 fold (build (code, type,
6079 build (BIT_AND_EXPR, TREE_TYPE (arg0),
6080 build (RSHIFT_EXPR,
6081 TREE_TYPE (TREE_OPERAND (arg0, 1)),
6082 TREE_OPERAND (arg0, 0),
6083 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
6084 convert (TREE_TYPE (arg0),
6085 integer_one_node)),
6086 arg1));
6087 }
6088
6089 /* If this is an NE or EQ comparison of zero against the result of a
6090 signed MOD operation whose second operand is a power of 2, make
6091 the MOD operation unsigned since it is simpler and equivalent. */
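/* For example, with signed x, x % 4 == 0 becomes
   (unsigned) x % 4 == 0; for a power-of-two divisor the two forms
   agree whenever the remainder is tested against zero. */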
6092 if ((code == NE_EXPR || code == EQ_EXPR)
6093 && integer_zerop (arg1)
6094 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
6095 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
6096 || TREE_CODE (arg0) == CEIL_MOD_EXPR
6097 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
6098 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
6099 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6100 {
6101 tree newtype = (*lang_hooks.types.unsigned_type) (TREE_TYPE (arg0));
6102 tree newmod = build (TREE_CODE (arg0), newtype,
6103 convert (newtype, TREE_OPERAND (arg0, 0)),
6104 convert (newtype, TREE_OPERAND (arg0, 1)));
6105
6106 return build (code, type, newmod, convert (newtype, arg1));
6107 }
6108
6109 /* If this is an NE comparison of zero with an AND of one, remove the
6110 comparison since the AND will give the correct value. */
6111 if (code == NE_EXPR && integer_zerop (arg1)
6112 && TREE_CODE (arg0) == BIT_AND_EXPR
6113 && integer_onep (TREE_OPERAND (arg0, 1)))
6114 return convert (type, arg0);
6115
6116 /* If we have (A & C) == C where C is a power of 2, convert this into
6117 (A & C) != 0. Similarly for NE_EXPR. */
6118 if ((code == EQ_EXPR || code == NE_EXPR)
6119 && TREE_CODE (arg0) == BIT_AND_EXPR
6120 && integer_pow2p (TREE_OPERAND (arg0, 1))
6121 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
6122 return fold (build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
6123 arg0, integer_zero_node));
6124
6125 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6126 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
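/* For example, with 32-bit ints, (x & 0x80000000) != 0 becomes
   x < 0 and (x & 0x80000000) == 0 becomes x >= 0. */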
6127 if ((code == EQ_EXPR || code == NE_EXPR)
6128 && TREE_CODE (arg0) == BIT_AND_EXPR
6129 && integer_zerop (arg1))
6130 {
6131 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0),
6132 TREE_OPERAND (arg0, 1));
6133 if (arg00 != NULL_TREE)
6134 {
6135 tree stype = (*lang_hooks.types.signed_type) (TREE_TYPE (arg00));
6136 return fold (build (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
6137 convert (stype, arg00),
6138 convert (stype, integer_zero_node)));
6139 }
6140 }
6141
6142 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
6143 and similarly for >= into !=. */
6144 if ((code == LT_EXPR || code == GE_EXPR)
6145 && TREE_UNSIGNED (TREE_TYPE (arg0))
6146 && TREE_CODE (arg1) == LSHIFT_EXPR
6147 && integer_onep (TREE_OPERAND (arg1, 0)))
6148 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
6149 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
6150 TREE_OPERAND (arg1, 1)),
6151 convert (TREE_TYPE (arg0), integer_zero_node));
6152
6153 else if ((code == LT_EXPR || code == GE_EXPR)
6154 && TREE_UNSIGNED (TREE_TYPE (arg0))
6155 && (TREE_CODE (arg1) == NOP_EXPR
6156 || TREE_CODE (arg1) == CONVERT_EXPR)
6157 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
6158 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
6159 return
6160 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
6161 convert (TREE_TYPE (arg0),
6162 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
6163 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
6164 convert (TREE_TYPE (arg0), integer_zero_node));
6165
6166 /* Simplify comparison of something with itself. (For IEEE
6167 floating-point, we can only do some of these simplifications.) */
6168 if (operand_equal_p (arg0, arg1, 0))
6169 {
6170 switch (code)
6171 {
6172 case EQ_EXPR:
6173 case GE_EXPR:
6174 case LE_EXPR:
6175 if (! FLOAT_TYPE_P (TREE_TYPE (arg0)))
6176 return constant_boolean_node (1, type);
6177 code = EQ_EXPR;
6178 TREE_SET_CODE (t, code);
6179 break;
6180
6181 case NE_EXPR:
6182 /* For NE, we can only do this simplification if integer. */
6183 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
6184 break;
6185 /* ... fall through ... */
6186 case GT_EXPR:
6187 case LT_EXPR:
6188 return constant_boolean_node (0, type);
6189 default:
6190 abort ();
6191 }
6192 }
6193
6194 /* An unsigned comparison against 0 can be simplified. */
6195 if (integer_zerop (arg1)
6196 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
6197 || POINTER_TYPE_P (TREE_TYPE (arg1)))
6198 && TREE_UNSIGNED (TREE_TYPE (arg1)))
6199 {
6200 switch (TREE_CODE (t))
6201 {
6202 case GT_EXPR:
6203 code = NE_EXPR;
6204 TREE_SET_CODE (t, NE_EXPR);
6205 break;
6206 case LE_EXPR:
6207 code = EQ_EXPR;
6208 TREE_SET_CODE (t, EQ_EXPR);
6209 break;
6210 case GE_EXPR:
6211 return omit_one_operand (type,
6212 convert (type, integer_one_node),
6213 arg0);
6214 case LT_EXPR:
6215 return omit_one_operand (type,
6216 convert (type, integer_zero_node),
6217 arg0);
6218 default:
6219 break;
6220 }
6221 }
6222
6223 /* Comparisons with the highest or lowest possible integer of
6224 the specified size have known values, and an unsigned comparison
6225 against 0x7fffffff can be rewritten as a signed comparison with zero. */
6226 {
6227 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
6228
6229 if (TREE_CODE (arg1) == INTEGER_CST
6230 && ! TREE_CONSTANT_OVERFLOW (arg1)
6231 && width <= HOST_BITS_PER_WIDE_INT
6232 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
6233 || POINTER_TYPE_P (TREE_TYPE (arg1))))
6234 {
6235 if (TREE_INT_CST_HIGH (arg1) == 0
6236 && (TREE_INT_CST_LOW (arg1)
6237 == ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1)
6238 && ! TREE_UNSIGNED (TREE_TYPE (arg1)))
6239 switch (TREE_CODE (t))
6240 {
6241 case GT_EXPR:
6242 return omit_one_operand (type,
6243 convert (type, integer_zero_node),
6244 arg0);
6245 case GE_EXPR:
6246 TREE_SET_CODE (t, EQ_EXPR);
6247 break;
6248
6249 case LE_EXPR:
6250 return omit_one_operand (type,
6251 convert (type, integer_one_node),
6252 arg0);
6253 case LT_EXPR:
6254 TREE_SET_CODE (t, NE_EXPR);
6255 break;
6256
6257 default:
6258 break;
6259 }
6260
6261 else if (TREE_INT_CST_HIGH (arg1) == -1
6262 && (TREE_INT_CST_LOW (arg1)
6263 == ((unsigned HOST_WIDE_INT) 1 << (width - 1)))
6264 && ! TREE_UNSIGNED (TREE_TYPE (arg1)))
6265 switch (TREE_CODE (t))
6266 {
6267 case LT_EXPR:
6268 return omit_one_operand (type,
6269 convert (type, integer_zero_node),
6270 arg0);
6271 case LE_EXPR:
6272 TREE_SET_CODE (t, EQ_EXPR);
6273 break;
6274
6275 case GE_EXPR:
6276 return omit_one_operand (type,
6277 convert (type, integer_one_node),
6278 arg0);
6279 case GT_EXPR:
6280 TREE_SET_CODE (t, NE_EXPR);
6281 break;
6282
6283 default:
6284 break;
6285 }
6286
6287 else if (TREE_INT_CST_HIGH (arg1) == 0
6288 && (TREE_INT_CST_LOW (arg1)
6289 == ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1)
6290 && TREE_UNSIGNED (TREE_TYPE (arg1))
6291 /* signed_type does not work on pointer types. */
6292 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
6293 {
6294 if (TREE_CODE (t) == LE_EXPR || TREE_CODE (t) == GT_EXPR)
6295 {
6296 tree st0, st1;
6297 st0 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg0));
6298 st1 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg1));
6299 return fold
6300 (build (TREE_CODE (t) == LE_EXPR ? GE_EXPR : LT_EXPR,
6301 type, convert (st0, arg0),
6302 convert (st1, integer_zero_node)));
6303 }
6304 }
6305 else if (TREE_INT_CST_HIGH (arg1) == 0
6306 && (TREE_INT_CST_LOW (arg1)
6307 == ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1)
6308 && TREE_UNSIGNED (TREE_TYPE (arg1)))
6309 switch (TREE_CODE (t))
6310 {
6311 case GT_EXPR:
6312 return omit_one_operand (type,
6313 convert (type, integer_zero_node),
6314 arg0);
6315 case GE_EXPR:
6316 TREE_SET_CODE (t, EQ_EXPR);
6317 break;
6318
6319 case LE_EXPR:
6320 return omit_one_operand (type,
6321 convert (type, integer_one_node),
6322 arg0);
6323 case LT_EXPR:
6324 TREE_SET_CODE (t, NE_EXPR);
6325 break;
6326
6327 default:
6328 break;
6329 }
6330 }
6331 }
6332
6333 /* If we are comparing an expression that just has comparisons
6334 of two integer values, arithmetic expressions of those comparisons,
6335 and constants, we can simplify it. There are only three cases
6336 to check: the two values can either be equal, the first can be
6337 greater, or the second can be greater. Fold the expression for
6338 those three values. Since each value must be 0 or 1, we have
6339 eight possibilities, each of which corresponds to the constant 0
6340 or 1 or one of the six possible comparisons.
6341
6342 This handles common cases like (a > b) == 0 but also handles
6343 expressions like ((x > y) - (y > x)) > 0, which supposedly
6344 occur in macroized code. */
6345
6346 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
6347 {
6348 tree cval1 = 0, cval2 = 0;
6349 int save_p = 0;
6350
6351 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
6352 /* Don't handle degenerate cases here; they should already
6353 have been handled anyway. */
6354 && cval1 != 0 && cval2 != 0
6355 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
6356 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
6357 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
6358 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
6359 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
6360 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
6361 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
6362 {
6363 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
6364 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
6365
6366 /* We can't just pass T to eval_subst in case cval1 or cval2
6367 was the same as ARG1. */
6368
6369 tree high_result
6370 = fold (build (code, type,
6371 eval_subst (arg0, cval1, maxval, cval2, minval),
6372 arg1));
6373 tree equal_result
6374 = fold (build (code, type,
6375 eval_subst (arg0, cval1, maxval, cval2, maxval),
6376 arg1));
6377 tree low_result
6378 = fold (build (code, type,
6379 eval_subst (arg0, cval1, minval, cval2, maxval),
6380 arg1));
6381
6382 /* All three of these results should be 0 or 1. Confirm they
6383 are. Then use those values to select the proper code
6384 to use. */
6385
6386 if ((integer_zerop (high_result)
6387 || integer_onep (high_result))
6388 && (integer_zerop (equal_result)
6389 || integer_onep (equal_result))
6390 && (integer_zerop (low_result)
6391 || integer_onep (low_result)))
6392 {
6393 /* Make a 3-bit mask with the high-order bit being the
6394 value for `>', the next for `=', and the low for `<'. */
6395 switch ((integer_onep (high_result) * 4)
6396 + (integer_onep (equal_result) * 2)
6397 + integer_onep (low_result))
6398 {
6399 case 0:
6400 /* Always false. */
6401 return omit_one_operand (type, integer_zero_node, arg0);
6402 case 1:
6403 code = LT_EXPR;
6404 break;
6405 case 2:
6406 code = EQ_EXPR;
6407 break;
6408 case 3:
6409 code = LE_EXPR;
6410 break;
6411 case 4:
6412 code = GT_EXPR;
6413 break;
6414 case 5:
6415 code = NE_EXPR;
6416 break;
6417 case 6:
6418 code = GE_EXPR;
6419 break;
6420 case 7:
6421 /* Always true. */
6422 return omit_one_operand (type, integer_one_node, arg0);
6423 }
6424
6425 t = build (code, type, cval1, cval2);
6426 if (save_p)
6427 return save_expr (t);
6428 else
6429 return fold (t);
6430 }
6431 }
6432 }
6433
6434 /* If this is a comparison of a field, we may be able to simplify it. */
6435 if ((TREE_CODE (arg0) == COMPONENT_REF
6436 || TREE_CODE (arg0) == BIT_FIELD_REF)
6437 && (code == EQ_EXPR || code == NE_EXPR)
6438 /* Handle the constant case even without -O
6439 to make sure the warnings are given. */
6440 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
6441 {
6442 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
6443 return t1 ? t1 : t;
6444 }
6445
6446 /* If this is a comparison of complex values and either or both sides
6447 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
6448 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
6449 This may prevent needless evaluations. */
6450 if ((code == EQ_EXPR || code == NE_EXPR)
6451 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
6452 && (TREE_CODE (arg0) == COMPLEX_EXPR
6453 || TREE_CODE (arg1) == COMPLEX_EXPR
6454 || TREE_CODE (arg0) == COMPLEX_CST
6455 || TREE_CODE (arg1) == COMPLEX_CST))
6456 {
6457 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
6458 tree real0, imag0, real1, imag1;
6459
6460 arg0 = save_expr (arg0);
6461 arg1 = save_expr (arg1);
6462 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
6463 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
6464 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
6465 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
6466
6467 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
6468 : TRUTH_ORIF_EXPR),
6469 type,
6470 fold (build (code, type, real0, real1)),
6471 fold (build (code, type, imag0, imag1))));
6472 }
6473
6474 /* Optimize comparisons of strlen vs zero to a compare of the
6475 first character of the string vs zero. To wit,
6476 strlen(ptr) == 0 => *ptr == 0
6477 strlen(ptr) != 0 => *ptr != 0
6478 Other cases should reduce to one of these two (or a constant)
6479 due to the return value of strlen being unsigned. */
6480 if ((code == EQ_EXPR || code == NE_EXPR)
6481 && integer_zerop (arg1)
6482 && TREE_CODE (arg0) == CALL_EXPR
6483 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR)
6484 {
6485 tree fndecl = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6486 tree arglist;
6487
6488 if (TREE_CODE (fndecl) == FUNCTION_DECL
6489 && DECL_BUILT_IN (fndecl)
6490 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
6491 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
6492 && (arglist = TREE_OPERAND (arg0, 1))
6493 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
6494 && ! TREE_CHAIN (arglist))
6495 return fold (build (code, type,
6496 build1 (INDIRECT_REF, char_type_node,
6497 TREE_VALUE (arglist)),
6498 integer_zero_node));
6499 }
6500
6501 /* From here on, the only cases we handle are when the result is
6502 known to be a constant.
6503
6504 To compute GT, swap the arguments and do LT.
6505 To compute GE, do LT and invert the result.
6506 To compute LE, swap the arguments, do LT and invert the result.
6507 To compute NE, do EQ and invert the result.
6508
6509 Therefore, the code below must handle only EQ and LT. */
6510
6511 if (code == LE_EXPR || code == GT_EXPR)
6512 {
6513 tem = arg0, arg0 = arg1, arg1 = tem;
6514 code = swap_tree_comparison (code);
6515 }
6516
6517 /* Note that it is safe to invert for real values here because we
6518 will check below in the one case that it matters. */
6519
6520 t1 = NULL_TREE;
6521 invert = 0;
6522 if (code == NE_EXPR || code == GE_EXPR)
6523 {
6524 invert = 1;
6525 code = invert_tree_comparison (code);
6526 }
6527
6528 /* Compute a result for LT or EQ if args permit;
6529 otherwise return T. */
6530 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
6531 {
6532 if (code == EQ_EXPR)
6533 t1 = build_int_2 (tree_int_cst_equal (arg0, arg1), 0);
6534 else
6535 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
6536 ? INT_CST_LT_UNSIGNED (arg0, arg1)
6537 : INT_CST_LT (arg0, arg1)),
6538 0);
6539 }
6540
6541 #if 0 /* This is no longer useful, but breaks some real code. */
6542 /* Assume a nonexplicit constant cannot equal an explicit one,
6543 since such code would be undefined anyway.
6544 Exception: on sysvr4, using #pragma weak,
6545 a label can come out as 0. */
6546 else if (TREE_CODE (arg1) == INTEGER_CST
6547 && !integer_zerop (arg1)
6548 && TREE_CONSTANT (arg0)
6549 && TREE_CODE (arg0) == ADDR_EXPR
6550 && code == EQ_EXPR)
6551 t1 = build_int_2 (0, 0);
6552 #endif
6553 /* Two real constants can be compared explicitly. */
6554 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
6555 {
6556 /* If either operand is a NaN, the result is false with two
6557 exceptions: First, an NE_EXPR is true on NaNs, but that case
6558 is already handled correctly since we will be inverting the
6559 result for NE_EXPR. Second, if we had inverted a LE_EXPR
6560 or a GE_EXPR into a LT_EXPR, we must return true so that it
6561 will be inverted into false. */
6562
6563 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
6564 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
6565 t1 = build_int_2 (invert && code == LT_EXPR, 0);
6566
6567 else if (code == EQ_EXPR)
6568 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
6569 TREE_REAL_CST (arg1)),
6570 0);
6571 else
6572 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
6573 TREE_REAL_CST (arg1)),
6574 0);
6575 }
6576
6577 if (t1 == NULL_TREE)
6578 return t;
6579
6580 if (invert)
6581 TREE_INT_CST_LOW (t1) ^= 1;
6582
6583 TREE_TYPE (t1) = type;
6584 if (TREE_CODE (type) == BOOLEAN_TYPE)
6585 return (*lang_hooks.truthvalue_conversion) (t1);
6586 return t1;
6587
6588 case COND_EXPR:
6589 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
6590 so all simple results must be passed through pedantic_non_lvalue. */
6591 if (TREE_CODE (arg0) == INTEGER_CST)
6592 return pedantic_non_lvalue
6593 (TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
6594 else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
6595 return pedantic_omit_one_operand (type, arg1, arg0);
6596
6597 /* If the second operand is zero, invert the comparison and swap
6598 the second and third operands. Likewise if the second operand
6599 is constant and the third is not, or if the third operand is
6600 equivalent to the first operand of the comparison. */
6601
6602 if (integer_zerop (arg1)
6603 || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
6604 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
6605 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
6606 TREE_OPERAND (t, 2),
6607 TREE_OPERAND (arg0, 1))))
6608 {
6609 /* See if this can be inverted. If it can't, possibly because
6610 it was a floating-point inequality comparison, don't do
6611 anything. */
6612 tem = invert_truthvalue (arg0);
6613
6614 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
6615 {
6616 t = build (code, type, tem,
6617 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
6618 arg0 = tem;
6619 /* arg1 should be the first argument of the new T. */
6620 arg1 = TREE_OPERAND (t, 1);
6621 STRIP_NOPS (arg1);
6622 }
6623 }
6624
6625 /* If we have A op B ? A : C, we may be able to convert this to a
6626 simpler expression, depending on the operation and the values
6627 of B and C. Signed zeros prevent all of these transformations,
6628 for reasons given above each one. */
6629
6630 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
6631 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
6632 arg1, TREE_OPERAND (arg0, 1))
6633 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
6634 {
6635 tree arg2 = TREE_OPERAND (t, 2);
6636 enum tree_code comp_code = TREE_CODE (arg0);
6637
6638 STRIP_NOPS (arg2);
6639
6640 /* If we have A op 0 ? A : -A, consider applying the following
6641 transformations:
6642
6643 A == 0? A : -A same as -A
6644 A != 0? A : -A same as A
6645 A >= 0? A : -A same as abs (A)
6646 A > 0? A : -A same as abs (A)
6647 A <= 0? A : -A same as -abs (A)
6648 A < 0? A : -A same as -abs (A)
6649
6650 None of these transformations work for modes with signed
6651 zeros. If A is +/-0, the first two transformations will
6652 change the sign of the result (from +0 to -0, or vice
6653 versa). The last four will fix the sign of the result,
6654 even though the original expressions could be positive or
6655 negative, depending on the sign of A.
6656
6657 Note that all these transformations are correct if A is
6658 NaN, since the two alternatives (A and -A) are also NaNs. */
6659 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
6660 ? real_zerop (TREE_OPERAND (arg0, 1))
6661 : integer_zerop (TREE_OPERAND (arg0, 1)))
6662 && TREE_CODE (arg2) == NEGATE_EXPR
6663 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
6664 switch (comp_code)
6665 {
6666 case EQ_EXPR:
6667 return
6668 pedantic_non_lvalue
6669 (convert (type,
6670 negate_expr
6671 (convert (TREE_TYPE (TREE_OPERAND (t, 1)),
6672 arg1))));
6673 case NE_EXPR:
6674 return pedantic_non_lvalue (convert (type, arg1));
6675 case GE_EXPR:
6676 case GT_EXPR:
6677 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
6678 arg1 = convert ((*lang_hooks.types.signed_type)
6679 (TREE_TYPE (arg1)), arg1);
6680 return pedantic_non_lvalue
6681 (convert (type, fold (build1 (ABS_EXPR,
6682 TREE_TYPE (arg1), arg1))));
6683 case LE_EXPR:
6684 case LT_EXPR:
6685 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
6686 arg1 = convert ((*lang_hooks.types.signed_type)
6687 (TREE_TYPE (arg1)), arg1);
6688 return pedantic_non_lvalue
6689 (negate_expr (convert (type,
6690 fold (build1 (ABS_EXPR,
6691 TREE_TYPE (arg1),
6692 arg1)))));
6693 default:
6694 abort ();
6695 }
6696
6697 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
6698 A == 0 ? A : 0 is always 0 unless A is -0. Note that
6699 both transformations are correct when A is NaN: A != 0
6700 is then true, and A == 0 is false. */
6701
6702 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
6703 {
6704 if (comp_code == NE_EXPR)
6705 return pedantic_non_lvalue (convert (type, arg1));
6706 else if (comp_code == EQ_EXPR)
6707 return pedantic_non_lvalue (convert (type, integer_zero_node));
6708 }
6709
6710 /* Try some transformations of A op B ? A : B.
6711
6712 A == B? A : B same as B
6713 A != B? A : B same as A
6714 A >= B? A : B same as max (A, B)
6715 A > B? A : B same as max (B, A)
6716 A <= B? A : B same as min (A, B)
6717 A < B? A : B same as min (B, A)
6718
6719 As above, these transformations don't work in the presence
6720 of signed zeros. For example, if A and B are zeros of
6721 opposite sign, the first two transformations will change
6722 the sign of the result. In the last four, the original
6723 expressions give different results for (A=+0, B=-0) and
6724 (A=-0, B=+0), but the transformed expressions do not.
6725
6726 The first two transformations are correct if either A or B
6727 is a NaN. In the first transformation, the condition will
6728 be false, and B will indeed be chosen. In the case of the
6729 second transformation, the condition A != B will be true,
6730 and A will be chosen.
6731
6732 The conversions to max() and min() are not correct if B is
6733 a number and A is not. The conditions in the original
6734 expressions will be false, so all four give B. The min()
6735 and max() versions would give a NaN instead. */
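/* Illustrative sketch (hypothetical user code): assuming NaNs need not
   be honored for the mode,

     double dmin (double a, double b) { return a < b ? a : b; }

   is folded to MIN_EXPR (b, a).  The operands are ordered so that the
   one the original expression yields when a == b comes first, which
   allows converting the result back to the corresponding COND_EXPR, as
   the comment inside the switch below notes. */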
6736 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
6737 arg2, TREE_OPERAND (arg0, 0)))
6738 {
6739 tree comp_op0 = TREE_OPERAND (arg0, 0);
6740 tree comp_op1 = TREE_OPERAND (arg0, 1);
6741 tree comp_type = TREE_TYPE (comp_op0);
6742
6743 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
6744 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
6745 comp_type = type;
6746
6747 switch (comp_code)
6748 {
6749 case EQ_EXPR:
6750 return pedantic_non_lvalue (convert (type, arg2));
6751 case NE_EXPR:
6752 return pedantic_non_lvalue (convert (type, arg1));
6753 case LE_EXPR:
6754 case LT_EXPR:
6755 /* In C++ a ?: expression can be an lvalue, so put the
6756 operand that will be used when the two compare equal
6757 first, so that we can convert this back to the
6758 corresponding COND_EXPR. */
6759 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
6760 return pedantic_non_lvalue
6761 (convert (type, fold (build (MIN_EXPR, comp_type,
6762 (comp_code == LE_EXPR
6763 ? comp_op0 : comp_op1),
6764 (comp_code == LE_EXPR
6765 ? comp_op1 : comp_op0)))));
6766 break;
6767 case GE_EXPR:
6768 case GT_EXPR:
6769 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
6770 return pedantic_non_lvalue
6771 (convert (type, fold (build (MAX_EXPR, comp_type,
6772 (comp_code == GE_EXPR
6773 ? comp_op0 : comp_op1),
6774 (comp_code == GE_EXPR
6775 ? comp_op1 : comp_op0)))));
6776 break;
6777 default:
6778 abort ();
6779 }
6780 }
6781
6782 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
6783 we might still be able to simplify this. For example,
6784 if C1 is one less or one more than C2, this might have started
6785 out as a MIN or MAX and been transformed by this function.
6786 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
6787
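/* Worked example (hypothetical trees): with C1 == C2 + 1, the LT_EXPR
   arm below rewrites

     a < 3 ? a : 2

   as MIN_EXPR (a, 2): when a <= 2 the result is a, otherwise it is 2.
   The TYPE_MAX_VALUE test keeps the C2 + 1 computation from wrapping
   when C2 is already the largest value of the type. */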
6788 if (INTEGRAL_TYPE_P (type)
6789 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6790 && TREE_CODE (arg2) == INTEGER_CST)
6791 switch (comp_code)
6792 {
6793 case EQ_EXPR:
6794 /* We can replace A with C1 in this case. */
6795 arg1 = convert (type, TREE_OPERAND (arg0, 1));
6796 t = build (code, type, TREE_OPERAND (t, 0), arg1,
6797 TREE_OPERAND (t, 2));
6798 break;
6799
6800 case LT_EXPR:
6801 /* If C1 is C2 + 1, this is min(A, C2). */
6802 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
6803 && operand_equal_p (TREE_OPERAND (arg0, 1),
6804 const_binop (PLUS_EXPR, arg2,
6805 integer_one_node, 0), 1))
6806 return pedantic_non_lvalue
6807 (fold (build (MIN_EXPR, type, arg1, arg2)));
6808 break;
6809
6810 case LE_EXPR:
6811 /* If C1 is C2 - 1, this is min(A, C2). */
6812 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
6813 && operand_equal_p (TREE_OPERAND (arg0, 1),
6814 const_binop (MINUS_EXPR, arg2,
6815 integer_one_node, 0), 1))
6816 return pedantic_non_lvalue
6817 (fold (build (MIN_EXPR, type, arg1, arg2)));
6818 break;
6819
6820 case GT_EXPR:
6821 /* If C1 is C2 - 1, this is max(A, C2). */
6822 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
6823 && operand_equal_p (TREE_OPERAND (arg0, 1),
6824 const_binop (MINUS_EXPR, arg2,
6825 integer_one_node, 0), 1))
6826 return pedantic_non_lvalue
6827 (fold (build (MAX_EXPR, type, arg1, arg2)));
6828 break;
6829
6830 case GE_EXPR:
6831 /* If C1 is C2 + 1, this is max(A, C2). */
6832 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
6833 && operand_equal_p (TREE_OPERAND (arg0, 1),
6834 const_binop (PLUS_EXPR, arg2,
6835 integer_one_node, 0), 1))
6836 return pedantic_non_lvalue
6837 (fold (build (MAX_EXPR, type, arg1, arg2)));
6838 break;
6839 case NE_EXPR:
6840 break;
6841 default:
6842 abort ();
6843 }
6844 }
6845
6846 /* If the second operand is simpler than the third, swap them
6847 since that produces better jump optimization results. */
6848 if ((TREE_CONSTANT (arg1) || DECL_P (arg1)
6849 || TREE_CODE (arg1) == SAVE_EXPR)
6850 && ! (TREE_CONSTANT (TREE_OPERAND (t, 2))
6851 || DECL_P (TREE_OPERAND (t, 2))
6852 || TREE_CODE (TREE_OPERAND (t, 2)) == SAVE_EXPR))
6853 {
6854 /* See if this can be inverted. If it can't, possibly because
6855 it was a floating-point inequality comparison, don't do
6856 anything. */
6857 tem = invert_truthvalue (arg0);
6858
6859 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
6860 {
6861 t = build (code, type, tem,
6862 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
6863 arg0 = tem;
6864 /* arg1 should be the first argument of the new T. */
6865 arg1 = TREE_OPERAND (t, 1);
6866 STRIP_NOPS (arg1);
6867 }
6868 }
6869
6870 /* Convert A ? 1 : 0 to simply A. */
6871 if (integer_onep (TREE_OPERAND (t, 1))
6872 && integer_zerop (TREE_OPERAND (t, 2))
6873 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
6874 call to fold will try to move the conversion inside
6875 a COND, which will recurse. In that case, the COND_EXPR
6876 is probably the best choice, so leave it alone. */
6877 && type == TREE_TYPE (arg0))
6878 return pedantic_non_lvalue (arg0);
6879
6880 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
6881 operation is simply A & 2, since A & 2 is already either 0 or 2. */
6882
6883 if (integer_zerop (TREE_OPERAND (t, 2))
6884 && TREE_CODE (arg0) == NE_EXPR
6885 && integer_zerop (TREE_OPERAND (arg0, 1))
6886 && integer_pow2p (arg1)
6887 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
6888 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
6889 arg1, 1))
6890 return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));
6891
6892 return t;
6893
6894 case COMPOUND_EXPR:
6895 /* When pedantic, a compound expression can be neither an lvalue
6896 nor an integer constant expression. */
6897 if (TREE_SIDE_EFFECTS (arg0) || pedantic)
6898 return t;
6899 /* Don't let (0, 0) be a null pointer constant; the NOP_EXPR keeps the zero from looking like a bare integer constant. */
6900 if (integer_zerop (arg1))
6901 return build1 (NOP_EXPR, type, arg1);
6902 return convert (type, arg1);
6903
6904 case COMPLEX_EXPR:
6905 if (wins)
6906 return build_complex (type, arg0, arg1);
6907 return t;
6908
6909 case REALPART_EXPR:
6910 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6911 return t;
6912 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6913 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
6914 TREE_OPERAND (arg0, 1));
6915 else if (TREE_CODE (arg0) == COMPLEX_CST)
6916 return TREE_REALPART (arg0);
6917 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6918 return fold (build (TREE_CODE (arg0), type,
6919 fold (build1 (REALPART_EXPR, type,
6920 TREE_OPERAND (arg0, 0))),
6921 fold (build1 (REALPART_EXPR,
6922 type, TREE_OPERAND (arg0, 1)))));
6923 return t;
6924
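/* The PLUS/MINUS distribution above, and its IMAGPART_EXPR twin below,
   are valid because complex addition and subtraction work
   componentwise: in GNU C terms, __real__ (a + b) == __real__ a +
   __real__ b, and likewise for __imag__. */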
6925 case IMAGPART_EXPR:
6926 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6927 return convert (type, integer_zero_node);
6928 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6929 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
6930 TREE_OPERAND (arg0, 0));
6931 else if (TREE_CODE (arg0) == COMPLEX_CST)
6932 return TREE_IMAGPART (arg0);
6933 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6934 return fold (build (TREE_CODE (arg0), type,
6935 fold (build1 (IMAGPART_EXPR, type,
6936 TREE_OPERAND (arg0, 0))),
6937 fold (build1 (IMAGPART_EXPR, type,
6938 TREE_OPERAND (arg0, 1)))));
6939 return t;
6940
6941 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
6942 appropriate. */
6943 case CLEANUP_POINT_EXPR:
6944 if (! has_cleanups (arg0))
6945 return TREE_OPERAND (t, 0);
6946
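/* Sketch of the rewrites below (hypothetical trees): when one operand
   needs no cleanup, the cleanup point is floated inward, e.g.

     CLEANUP_POINT_EXPR (a + 1)

   becomes

     CLEANUP_POINT_EXPR (a) + 1

   giving later folds an ordinary PLUS_EXPR to work with. */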
6947 {
6948 enum tree_code code0 = TREE_CODE (arg0);
6949 int kind0 = TREE_CODE_CLASS (code0);
6950 tree arg00 = TREE_OPERAND (arg0, 0);
6951 tree arg01;
6952
6953 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
6954 return fold (build1 (code0, type,
6955 fold (build1 (CLEANUP_POINT_EXPR,
6956 TREE_TYPE (arg00), arg00))));
6957
6958 if (kind0 == '<' || kind0 == '2'
6959 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
6960 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
6961 || code0 == TRUTH_XOR_EXPR)
6962 {
6963 arg01 = TREE_OPERAND (arg0, 1);
6964
6965 if (TREE_CONSTANT (arg00)
6966 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
6967 && ! has_cleanups (arg00)))
6968 return fold (build (code0, type, arg00,
6969 fold (build1 (CLEANUP_POINT_EXPR,
6970 TREE_TYPE (arg01), arg01))));
6971
6972 if (TREE_CONSTANT (arg01))
6973 return fold (build (code0, type,
6974 fold (build1 (CLEANUP_POINT_EXPR,
6975 TREE_TYPE (arg00), arg00)),
6976 arg01));
6977 }
6978
6979 return t;
6980 }
6981
6982 case CALL_EXPR:
6983 /* Check for a built-in function. */
6984 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
6985 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (expr, 0), 0))
6986 == FUNCTION_DECL)
6987 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
6988 {
6989 tree tmp = fold_builtin (expr);
6990 if (tmp)
6991 return tmp;
6992 }
6993 return t;
6994
6995 default:
6996 return t;
6997 } /* switch (code) */
6998 }
6999
7000 /* Determine if the first argument is a multiple of the second argument.
7001 Return 0 if it is not, or if we cannot easily determine that it is.
7002
7003 An example of the sort of thing we care about (at this point; this routine
7004 could surely be made more general, and expanded to do what the *_DIV_EXPR's
7005 fold cases do now) is discovering that
7006
7007 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
7008
7009 is a multiple of
7010
7011 SAVE_EXPR (J * 8)
7012
7013 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
7014
7015 This code also handles discovering that
7016
7017 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
7018
7019 is a multiple of 8 so we don't have to worry about dealing with a
7020 possible remainder.
7021
7022 Note that we *look* inside a SAVE_EXPR only to determine how it was
7023 calculated; it is not safe for fold to do much of anything else with the
7024 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
7025 at run time. In particular, the second example above *cannot* be implemented
7026 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
7027 evaluation time of the original SAVE_EXPR is not necessarily the same at
7028 the time the new expression is evaluated. The only optimization of this
7029 sort that would be valid is changing
7030
7031 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
7032
7033 divided by 8 to
7034
7035 SAVE_EXPR (I) * SAVE_EXPR (J)
7036
7037 (where the same SAVE_EXPR (J) is used in the original and the
7038 transformed version). */
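/* Worked example: for TOP = (J * 8) + 16 and BOTTOM = 8, the
   PLUS_EXPR case below recurses into both operands; J * 8 succeeds
   through the MULT_EXPR case because its second operand is a multiple
   of 8, and 16 succeeds through the INTEGER_CST case because
   16 % 8 == 0.  The whole sum is therefore known to be a multiple
   of 8. */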
7039
7040 static int
7041 multiple_of_p (type, top, bottom)
7042 tree type;
7043 tree top;
7044 tree bottom;
7045 {
7046 if (operand_equal_p (top, bottom, 0))
7047 return 1;
7048
7049 if (TREE_CODE (type) != INTEGER_TYPE)
7050 return 0;
7051
7052 switch (TREE_CODE (top))
7053 {
7054 case MULT_EXPR:
7055 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
7056 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
7057
7058 case PLUS_EXPR:
7059 case MINUS_EXPR:
7060 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
7061 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
7062
7063 case LSHIFT_EXPR:
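/* X << N equals X * 2**N, so TOP is a multiple of BOTTOM whenever
   2**N alone is; the code below forms 1 << N and recurses on it. */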
7064 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
7065 {
7066 tree op1, t1;
7067
7068 op1 = TREE_OPERAND (top, 1);
7069 /* const_binop may not detect overflow correctly,
7070 so check for it explicitly here. */
7071 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
7072 > TREE_INT_CST_LOW (op1)
7073 && TREE_INT_CST_HIGH (op1) == 0
7074 && 0 != (t1 = convert (type,
7075 const_binop (LSHIFT_EXPR, size_one_node,
7076 op1, 0)))
7077 && ! TREE_OVERFLOW (t1))
7078 return multiple_of_p (type, t1, bottom);
7079 }
7080 return 0;
7081
7082 case NOP_EXPR:
7083 /* Can't handle conversions from non-integral or wider integral types. */
7084 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
7085 || (TYPE_PRECISION (type)
7086 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
7087 return 0;
7088
7089 /* ... fall through ... */
7090
7091 case SAVE_EXPR:
7092 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
7093
7094 case INTEGER_CST:
7095 if (TREE_CODE (bottom) != INTEGER_CST
7096 || (TREE_UNSIGNED (type)
7097 && (tree_int_cst_sgn (top) < 0
7098 || tree_int_cst_sgn (bottom) < 0)))
7099 return 0;
7100 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
7101 top, bottom, 0));
7102
7103 default:
7104 return 0;
7105 }
7106 }
7107
7108 /* Return true if `t' is known to be non-negative; 0 means `unknown', not `negative'. */
7109
7110 int
7111 tree_expr_nonnegative_p (t)
7112 tree t;
7113 {
7114 switch (TREE_CODE (t))
7115 {
7116 case ABS_EXPR:
7117 case FFS_EXPR:
7118 return 1;
7119 case INTEGER_CST:
7120 return tree_int_cst_sgn (t) >= 0;
7121 case TRUNC_DIV_EXPR:
7122 case CEIL_DIV_EXPR:
7123 case FLOOR_DIV_EXPR:
7124 case ROUND_DIV_EXPR:
7125 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
7126 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7127 case TRUNC_MOD_EXPR:
7128 case CEIL_MOD_EXPR:
7129 case FLOOR_MOD_EXPR:
7130 case ROUND_MOD_EXPR:
7131 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
7132 case COND_EXPR:
7133 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
7134 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
7135 case COMPOUND_EXPR:
7136 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7137 case MIN_EXPR:
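/* min (a, b) can only be nonnegative if both operands are. */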
7138 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
7139 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7140 case MAX_EXPR:
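/* max (a, b) is nonnegative if either operand is. */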
7141 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
7142 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7143 case MODIFY_EXPR:
7144 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7145 case BIND_EXPR:
7146 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7147 case SAVE_EXPR:
7148 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
7149 case NON_LVALUE_EXPR:
7150 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
7151 case RTL_EXPR:
7152 return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));
7153
7154 default:
7155 if (truth_value_p (TREE_CODE (t)))
7156 /* Truth values evaluate to 0 or 1, which is nonnegative. */
7157 return 1;
7158 else
7159 /* We don't know the sign of `t', so be conservative and return false. */
7160 return 0;
7161 }
7162 }
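/* Example of the conservatism above (hypothetical tree): for
   `x > 0 ? x : 0' the COND_EXPR case requires both arms to be
   nonnegative.  The constant 0 qualifies, but the bare `x' falls to
   the default case and yields 0 ("unknown"), so the whole expression
   is treated as possibly negative even though it never is at run
   time. */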
7163
7164 /* Return true if `r' is known to be non-negative.
7165 Only handles constants at the moment. */
7166
7167 int
7168 rtl_expr_nonnegative_p (r)
7169 rtx r;
7170 {
7171 switch (GET_CODE (r))
7172 {
7173 case CONST_INT:
7174 return INTVAL (r) >= 0;
7175
7176 case CONST_DOUBLE:
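/* A CONST_DOUBLE with VOIDmode is a double-width integer constant;
   its sign lives in the high half. */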
7177 if (GET_MODE (r) == VOIDmode)
7178 return CONST_DOUBLE_HIGH (r) >= 0;
7179 return 0;
7180
7181 case CONST_VECTOR:
7182 {
7183 int units, i;
7184 rtx elt;
7185
7186 units = CONST_VECTOR_NUNITS (r);
7187
7188 for (i = 0; i < units; ++i)
7189 {
7190 elt = CONST_VECTOR_ELT (r, i);
7191 if (!rtl_expr_nonnegative_p (elt))
7192 return 0;
7193 }
7194
7195 return 1;
7196 }
7197
7198 case SYMBOL_REF:
7199 case LABEL_REF:
7200 /* These are always nonnegative. */
7201 return 1;
7202
7203 default:
7204 return 0;
7205 }
7206 }