/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant and prior overflow indicator, and
   forces the value to fit the type.  It returns an overflow indicator.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"

static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree int_const_binop (enum tree_code, tree, tree, int);
static tree const_binop (enum tree_code, tree, tree, int);
static hashval_t size_htab_hash (const void *);
static int size_htab_eq (const void *, const void *);
static tree fold_convert (tree, tree);
static enum tree_code invert_tree_comparison (enum tree_code);
static enum tree_code swap_tree_comparison (enum tree_code);
static int comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (int);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static tree strip_compound_expr (tree, tree);
static int multiple_of_p (tree, tree, tree);
static tree constant_boolean_node (int, tree);
static int count_cond (tree, int);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
                                                 tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
#define COMPCODE_FALSE   0
#define COMPCODE_LT      1
#define COMPCODE_EQ      2
#define COMPCODE_LE      3
#define COMPCODE_GT      4
#define COMPCODE_NE      5
#define COMPCODE_GE      6
#define COMPCODE_TRUE    7
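
/* For example, combining two predicates on the same operands with AND
   corresponds to bitwise AND of their compcodes:
   COMPCODE_LE & COMPCODE_NE is 3 & 5 == 1 == COMPCODE_LT, i.e.
   (a <= b) && (a != b) simplifies to (a < b).  */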

/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
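
/* For illustration, scaled down to 8-bit values: 0x7f + 0x01 gives
   0x80; the addends agree in sign but the sum differs, so
   ~(a ^ b) & (a ^ sum) has its sign bit set and the macro yields
   nonzero.  */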
\f
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
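
/* For illustration, with a 32-bit HOST_WIDE_INT, BASE is 0x10000: the
   value 0x12345678 then has LOWPART 0x5678 and HIGHPART 0x1234, and
   LOWPART + HIGHPART * BASE recovers the original value.  */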

/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
\f
/* Make the integer constant T valid for its type by setting to 0 or 1 all
   the bits in the constant that don't belong in the type.

   Return 1 if a signed overflow occurs, 0 otherwise.  If OVERFLOW is
   nonzero, a signed overflow has already occurred in calculating T, so
   propagate it.  */

int
force_fit_type (tree t, int overflow)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;

  if (TREE_CODE (t) == REAL_CST)
    {
      /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
         Consider doing it via real_convert now.  */
      return overflow;
    }

  else if (TREE_CODE (t) != INTEGER_CST)
    return overflow;

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec == 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    TREE_INT_CST_HIGH (t)
      &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      TREE_INT_CST_HIGH (t) = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
    }

  /* Unsigned types do not suffer sign extension or overflow unless they
     are a sizetype.  */
  if (TREE_UNSIGNED (TREE_TYPE (t))
      && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
            && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
    return overflow;

  /* If the value's sign bit is set, extend the sign.  */
  if (prec != 2 * HOST_BITS_PER_WIDE_INT
      && (prec > HOST_BITS_PER_WIDE_INT
          ? 0 != (TREE_INT_CST_HIGH (t)
                  & ((HOST_WIDE_INT) 1
                     << (prec - HOST_BITS_PER_WIDE_INT - 1)))
          : 0 != (TREE_INT_CST_LOW (t)
                  & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
    {
      /* Value is negative:
         set to 1 all the bits that are outside this type's precision.  */
      if (prec > HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_HIGH (t)
          |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      else
        {
          TREE_INT_CST_HIGH (t) = -1;
          if (prec < HOST_BITS_PER_WIDE_INT)
            TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
        }
    }

  /* Return nonzero if signed overflow occurred.  */
  return
    ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
     != 0);
}
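
/* As an illustration: for a signed 8-bit type, a constant with low
   part 0x17f first has the bits beyond the precision cleared, giving
   0x7f.  Bit 7 is clear, so nothing is sign extended, and nonzero is
   returned because the stored value changed, i.e. the original
   constant did not fit and signed overflow is reported.  */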
\f
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}

/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
\f
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);   /* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
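
/* For example, multiplying the doubleword values 3 and -2 stores -6
   in *LV/*HV.  After the sign corrections above, the upper half of
   the 4-word product is all ones, matching the sign of the low half,
   so no overflow is reported.  */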
\f
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
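
/* For example, with PREC 8 and ARITH nonzero, shifting 0x61 left by 2
   gives 0x184; within the 8-bit precision bit 7 is set, so SIGNMASK
   is all ones and the extension step produces a negative doubleword
   whose low byte is 0x84.  */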

/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
\f
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
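
/* Rotation is implemented as the inclusive OR of two complementary
   logical shifts: rotating an 8-bit-precision value left by 3, for
   example, ORs the value shifted left by 3 with the value shifted
   right by 5.  */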

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
\f
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];   /* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {   /* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {   /* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);   /* to zero 5th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {   /* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;   /* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num [num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:   /* round toward zero */
    case EXACT_DIV_EXPR:   /* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:   /* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:   /* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:   /* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (rem) > abs (den)), adjust the quotient.  */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      abort ();
    }

  /* compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
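
/* As a worked example of the rounding modes: dividing -8 by 3 gives a
   truncated quotient of -2 with remainder -2.  TRUNC_DIV_EXPR keeps
   -2; FLOOR_DIV_EXPR rounds toward negative infinity, adjusting to -3
   (remainder 1); CEIL_DIV_EXPR keeps -2; and ROUND_DIV_EXPR also
   adjusts to -3, since twice the remainder's magnitude (4) exceeds
   the divisor's magnitude (3).  */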
\f
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TREE_UNSIGNED (type))
        return false;

      /* Check that -CST will not overflow type.  */
      prec = TYPE_PRECISION (type);
      if (prec > HOST_BITS_PER_WIDE_INT)
        {
          if (TREE_INT_CST_LOW (t) != 0)
            return true;
          prec -= HOST_BITS_PER_WIDE_INT;
          val = TREE_INT_CST_HIGH (t);
        }
      else
        val = TREE_INT_CST_LOW (t);
      if (prec < HOST_BITS_PER_WIDE_INT)
        val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
      return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return ! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations;

    case MULT_EXPR:
      if (TREE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (! TREE_UNSIGNED (type)
          && 0 != (tem = fold (build1 (NEGATE_EXPR, type, t)))
          && ! TREE_OVERFLOW (tem))
        return tem;
      break;

    case REAL_CST:
      tem = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (t)));
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem))
        return convert (type, tem);
      break;

    case NEGATE_EXPR:
      return convert (type, TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        return convert (type,
                        fold (build (MINUS_EXPR, TREE_TYPE (t),
                                     TREE_OPERAND (t, 1),
                                     TREE_OPERAND (t, 0))));
      break;

    case MULT_EXPR:
      if (TREE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return convert (type,
                            fold (build (TREE_CODE (t), TREE_TYPE (t),
                                         TREE_OPERAND (t, 0),
                                         negate_expr (tem))));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return convert (type,
                            fold (build (TREE_CODE (t), TREE_TYPE (t),
                                         negate_expr (tem),
                                         TREE_OPERAND (t, 1))));
        }
      break;

    default:
      break;
    }

  return convert (type, fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t)));
}
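
/* For example, for integral types negate_expr rewrites - (A - B) as
   B - A, and folds the negation of an INTEGER_CST into a new constant
   when the negated value does not overflow.  */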
\f
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
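
/* As an illustration, splitting the tree A + 4 with code PLUS_EXPR
   stores the literal 4 in *LITP, leaves *CONP and *MINUS_LITP null,
   and returns the variable part A; splitting A - 4 instead stores
   the 4 in *MINUS_LITP.  */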

/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build (MINUS_EXPR, type, convert (type, t2),
                          convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build (MINUS_EXPR, type, convert (type, t1),
                          convert (type, TREE_OPERAND (t2, 0)));
        }
      return build (code, type, convert (type, t1), convert (type, t2));
    }

  return fold (build (code, type, convert (type, t1), convert (type, t2)));
}
\f
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TREE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;
  int no_overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
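      /* ... fall through ...  */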
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
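      /* ... fall through ...  */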
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      abort ();
    }

  /* If this is for a sizetype, can be represented as one (signed)
     HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
     constants.  */
  if (is_sizetype
      && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
          || (hi == -1 && (HOST_WIDE_INT) low < 0))
      && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
    return size_int_type_wide (low, type);
  else
    {
      t = build_int_2 (low, hi);
      TREE_TYPE (t) = TREE_TYPE (arg1);
    }

  TREE_OVERFLOW (t)
    = ((notrunc
        ? (!uns || is_sizetype) && overflow
        : (force_fit_type (t, (!uns || is_sizetype) && overflow)
           && ! no_overflow))
       | TREE_OVERFLOW (arg1)
       | TREE_OVERFLOW (arg2));

  /* If we're doing a size calculation, unsigned arithmetic does overflow.
     So check if force_fit_type truncated the value.  */
  if (is_sizetype
      && ! TREE_OVERFLOW (t)
      && (TREE_INT_CST_HIGH (t) != hi
          || TREE_INT_CST_LOW (t) != low))
    TREE_OVERFLOW (t) = 1;

  TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
                                | TREE_CONSTANT_OVERFLOW (arg1)
                                | TREE_CONSTANT_OVERFLOW (arg2));
  return t;
}

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      REAL_ARITHMETIC (value, code, d1, d2);

      t = build_real (type, real_value_truncate (mode, value));

      TREE_OVERFLOW (t)
        = (force_fit_type (t, 0)
           | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t = build_complex (type,
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (PLUS_EXPR,
                                             const_binop (MULT_EXPR, r1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, i1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc),
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (MINUS_EXPR,
                                             const_binop (MULT_EXPR, i1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, r1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc));
          }
          break;

        default:
          abort ();
        }
      return t;
    }
  return 0;
}

/* These are the hash table functions for the hash table of INTEGER_CST
   nodes of a sizetype.  */

/* Return the hash code for X, an INTEGER_CST.  */

static hashval_t
size_htab_hash (const void *x)
{
  tree t = (tree) x;

  return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
          ^ htab_hash_pointer (TREE_TYPE (t))
          ^ (TREE_OVERFLOW (t) << 20));
}

/* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
   is the same as that given by *Y, also an INTEGER_CST tree node.  */

static int
size_htab_eq (const void *x, const void *y)
{
  tree xt = (tree) x;
  tree yt = (tree) y;

  return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
          && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
          && TREE_TYPE (xt) == TREE_TYPE (yt)
          && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
}
\f
/* Return an INTEGER_CST whose value has low-order HOST_BITS_PER_WIDE_INT
   bits given by NUMBER and whose type is the sizetype represented by KIND.  */

tree
size_int_wide (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return size_int_type_wide (number, sizetype_tab[(int) kind]);
}

/* Likewise, but the desired type is specified explicitly.  */

static GTY (()) tree new_const;
static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
     htab_t size_htab;

tree
size_int_type_wide (HOST_WIDE_INT number, tree type)
{
  void **slot;

  if (size_htab == 0)
    {
      size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
      new_const = make_node (INTEGER_CST);
    }

  /* Adjust NEW_CONST to be the constant we want.  If it's already in the
     hash table, we return the value from the hash table.  Otherwise, we
     place that in the hash table and make a new node for the next time.  */
  TREE_INT_CST_LOW (new_const) = number;
  TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
  TREE_TYPE (new_const) = type;
  TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
    = force_fit_type (new_const, 0);

  slot = htab_find_slot (size_htab, new_const, INSERT);
  if (*slot == 0)
    {
      tree t = new_const;

      *slot = new_const;
      new_const = make_node (INTEGER_CST);
      return t;
    }
  else
    return (tree) *slot;
}

/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type, and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold (build (code, type, arg0, arg1));
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* If the type is already signed, just do the simple thing.  */
  if (! TREE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = (type == bitsizetype || type == ubitsizetype
           ? sbitsizetype : ssizetype);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, convert (ctype, arg0),
                       convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, convert (ctype, integer_zero_node),
                       convert (ctype, size_binop (MINUS_EXPR, arg1, arg0)));
}
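
/* For example, given the unsigned sizetype constants 2 and 5,
   size_diffop computes 5 - 2 = 3 in the unsigned type, converts the
   result to the corresponding signed type, and negates it, returning
   -3 without relying on wraparound in the unsigned subtraction.  */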
\f

/* Given T, a tree representing a type conversion of ARG1, a constant,
   return a constant tree representing the result of the conversion.  */

static tree
fold_convert (tree t, tree arg1)
{
  tree type = TREE_TYPE (t);
  int overflow = 0;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        {
          /* If we would build a constant wider than GCC supports,
             leave the conversion unfolded.  */
          if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
            return t;

          /* If we are trying to make a sizetype for a small integer, use
             size_int to pick up cached types to reduce duplicate nodes.  */
          if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && !TREE_CONSTANT_OVERFLOW (arg1)
              && compare_tree_int (arg1, 10000) < 0)
            return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);

          /* Given an integer constant, make new constant with new type,
             appropriately sign-extended or truncated.  */
          t = build_int_2 (TREE_INT_CST_LOW (arg1),
                           TREE_INT_CST_HIGH (arg1));
          TREE_TYPE (t) = type;
          /* Indicate an overflow if (1) ARG1 already overflowed,
             or (2) force_fit_type indicates an overflow.
             Tell force_fit_type that an overflow has already occurred
             if ARG1 is a too-large unsigned value and T is signed.
             But don't indicate an overflow if converting a pointer.  */
          TREE_OVERFLOW (t)
            = ((force_fit_type (t,
                                (TREE_INT_CST_HIGH (arg1) < 0
                                 && (TREE_UNSIGNED (type)
                                     < TREE_UNSIGNED (TREE_TYPE (arg1)))))
                && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
               || TREE_OVERFLOW (arg1));
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
        }
      else if (TREE_CODE (arg1) == REAL_CST)
        {
          /* Don't initialize these, use assignments.
             Initialized local aggregates don't work on old compilers.  */
          REAL_VALUE_TYPE x;
          REAL_VALUE_TYPE l;
          REAL_VALUE_TYPE u;
          tree type1 = TREE_TYPE (arg1);
          int no_upper_bound;

          x = TREE_REAL_CST (arg1);
          l = real_value_from_int_cst (type1, TYPE_MIN_VALUE (type));

          no_upper_bound = (TYPE_MAX_VALUE (type) == NULL);
          if (!no_upper_bound)
            u = real_value_from_int_cst (type1, TYPE_MAX_VALUE (type));

          /* See if X will be in range after truncation towards 0.
             To compensate for truncation, move the bounds away from 0,
             but reject if X exactly equals the adjusted bounds.  */
          REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
          if (!no_upper_bound)
            REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
          /* If X is a NaN, use zero instead and show we have an overflow.
             Otherwise, range check.  */
          if (REAL_VALUE_ISNAN (x))
            overflow = 1, x = dconst0;
          else if (! (REAL_VALUES_LESS (l, x)
                      && !no_upper_bound
                      && REAL_VALUES_LESS (x, u)))
            overflow = 1;

          {
            HOST_WIDE_INT low, high;
            REAL_VALUE_TO_INT (&low, &high, x);
            t = build_int_2 (low, high);
          }
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t)
            = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
        }
      TREE_TYPE (t) = type;
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        {
          if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
            {
              /* We make a copy of ARG1 so that we don't modify an
                 existing constant tree.  */
              t = copy_node (arg1);
              TREE_TYPE (t) = type;
              return t;
            }

          t = build_real (type,
                          real_value_truncate (TYPE_MODE (type),
                                               TREE_REAL_CST (arg1)));

          TREE_OVERFLOW (t)
            = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
    }
  TREE_CONSTANT (t) = 1;
  return t;
}
\f
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue (tree x)
{
  tree result;

  /* These things are certainly not lvalues.  */
  if (TREE_CODE (x) == NON_LVALUE_EXPR
      || TREE_CODE (x) == INTEGER_CST
      || TREE_CODE (x) == REAL_CST
      || TREE_CODE (x) == STRING_CST
      || TREE_CODE (x) == ADDR_EXPR)
    return x;

  result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
  TREE_CONSTANT (result) = TREE_CONSTANT (x);
  return result;
}

/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

tree
pedantic_non_lvalue (tree x)
{
  if (pedantic_lvalues)
    return non_lvalue (x);
  else
    return x;
}
\f
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR.  */

static enum tree_code
invert_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return LE_EXPR;
    case GE_EXPR:
      return LT_EXPR;
    case LT_EXPR:
      return GE_EXPR;
    case LE_EXPR:
      return GT_EXPR;
    default:
      abort ();
    }
}

/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

static enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    default:
      abort ();
    }
}


/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static int
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    default:
      abort ();
    }
}

/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (int code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    default:
      abort ();
    }
}

/* Return nonzero if CODE is a tree code that represents a truth value.  */

static int
truth_value_p (enum tree_code code)
{
  return (TREE_CODE_CLASS (code) == '<'
          || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
          || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
          || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
}
\f
/* Return nonzero if two operands are necessarily equal.
   If ONLY_CONST is nonzero, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.  */

int
operand_equal_p (tree arg0, tree arg1, int only_const)
{
  tree fndecl;

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  */
  if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
         Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! only_const
      && (TREE_CODE (arg0) == SAVE_EXPR
          || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
        return (! TREE_CONSTANT_OVERFLOW (arg0)
                && ! TREE_CONSTANT_OVERFLOW (arg1)
                && tree_int_cst_equal (arg0, arg1));

      case REAL_CST:
        return (! TREE_CONSTANT_OVERFLOW (arg0)
                && ! TREE_CONSTANT_OVERFLOW (arg1)
                && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
                                          TREE_REAL_CST (arg1)));

      case VECTOR_CST:
        {
          tree v1, v2;

          if (TREE_CONSTANT_OVERFLOW (arg0)
              || TREE_CONSTANT_OVERFLOW (arg1))
            return 0;

          v1 = TREE_VECTOR_CST_ELTS (arg0);
          v2 = TREE_VECTOR_CST_ELTS (arg1);
          while (v1 && v2)
            {
              if (!operand_equal_p (v1, v2, only_const))
                return 0;
              v1 = TREE_CHAIN (v1);
              v2 = TREE_CHAIN (v2);
            }

          return 1;
        }

      case COMPLEX_CST:
        return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
                                 only_const)
                && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
                                    only_const));

      case STRING_CST:
        return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
                && ! memcmp (TREE_STRING_POINTER (arg0),
                             TREE_STRING_POINTER (arg1),
                             TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
        return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
                                0);
      default:
        break;
      }

  if (only_const)
    return 0;

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case '1':
      /* Two conversions are equal only if signedness and modes match.  */
      if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
          && (TREE_UNSIGNED (TREE_TYPE (arg0))
              != TREE_UNSIGNED (TREE_TYPE (arg1))))
        return 0;

      return operand_equal_p (TREE_OPERAND (arg0, 0),
                              TREE_OPERAND (arg1, 0), 0);

    case '<':
    case '2':
      if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
          && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
                              0))
        return 1;

      /* For commutative ops, allow the other order.  */
      return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
               || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
               || TREE_CODE (arg0) == BIT_IOR_EXPR
               || TREE_CODE (arg0) == BIT_XOR_EXPR
               || TREE_CODE (arg0) == BIT_AND_EXPR
               || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 1), 0)
              && operand_equal_p (TREE_OPERAND (arg0, 1),
                                  TREE_OPERAND (arg1, 0), 0));

    case 'r':
      /* If either of the pointer (or reference) expressions we are
1959 dereferencing contain a side effect, these cannot be equal. */
1960 if (TREE_SIDE_EFFECTS (arg0)
1961 || TREE_SIDE_EFFECTS (arg1))
1962 return 0;
1963
1964 switch (TREE_CODE (arg0))
1965 {
1966 case INDIRECT_REF:
1967 return operand_equal_p (TREE_OPERAND (arg0, 0),
1968 TREE_OPERAND (arg1, 0), 0);
1969
1970 case COMPONENT_REF:
1971 case ARRAY_REF:
1972 case ARRAY_RANGE_REF:
1973 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1974 TREE_OPERAND (arg1, 0), 0)
1975 && operand_equal_p (TREE_OPERAND (arg0, 1),
1976 TREE_OPERAND (arg1, 1), 0));
1977
1978 case BIT_FIELD_REF:
1979 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1980 TREE_OPERAND (arg1, 0), 0)
1981 && operand_equal_p (TREE_OPERAND (arg0, 1),
1982 TREE_OPERAND (arg1, 1), 0)
1983 && operand_equal_p (TREE_OPERAND (arg0, 2),
1984 TREE_OPERAND (arg1, 2), 0));
1985 default:
1986 return 0;
1987 }
1988
1989 case 'e':
1990 switch (TREE_CODE (arg0))
1991 {
1992 case ADDR_EXPR:
1993 case TRUTH_NOT_EXPR:
1994 return operand_equal_p (TREE_OPERAND (arg0, 0),
1995 TREE_OPERAND (arg1, 0), 0);
1996
1997 case RTL_EXPR:
1998 return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
1999
2000 case CALL_EXPR:
2001 /* If the CALL_EXPRs call different functions, then they
2002 clearly cannot be equal. */
2003 if (! operand_equal_p (TREE_OPERAND (arg0, 0),
2004 TREE_OPERAND (arg1, 0), 0))
2005 return 0;
2006
2007 /* Only consider const functions equivalent. */
2008 fndecl = get_callee_fndecl (arg0);
2009 if (fndecl == NULL_TREE
2010 || ! (flags_from_decl_or_type (fndecl) & ECF_CONST))
2011 return 0;
2012
2013 /* Now see if all the arguments are the same. operand_equal_p
2014 does not handle TREE_LIST, so we walk the operands here
2015 feeding them to operand_equal_p. */
2016 arg0 = TREE_OPERAND (arg0, 1);
2017 arg1 = TREE_OPERAND (arg1, 1);
2018 while (arg0 && arg1)
2019 {
2020 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1), 0))
2021 return 0;
2022
2023 arg0 = TREE_CHAIN (arg0);
2024 arg1 = TREE_CHAIN (arg1);
2025 }
2026
2027 /* If we get here and both argument lists are exhausted
2028 then the CALL_EXPRs are equal. */
2029 return ! (arg0 || arg1);
2030
2031 default:
2032 return 0;
2033 }
2034
2035 case 'd':
2036 /* Consider __builtin_sqrt equal to sqrt. */
2037 return TREE_CODE (arg0) == FUNCTION_DECL
2038 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2039 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2040 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1);
2041
2042 default:
2043 return 0;
2044 }
2045 }
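/* Editor's illustrative sketch, not part of this file: the two IEEE
   subtleties named in the comment before operand_equal_p, observable
   from plain C (signbit is C99).  */
#if 0
#include <math.h>
#include <stdio.h>

int
main (void)
{
  double pz = 0.0, nz = -0.0, qnan = NAN;

  /* -0.0 and 0.0 compare equal with ==, yet are distinguishable:
     their sign bits differ (and 1.0/pz != 1.0/nz).  */
  printf ("pz == nz: %d, distinguishable: %d\n",
          pz == nz, signbit (pz) != signbit (nz));

  /* A NaN is indistinguishable from itself, yet NaN != NaN.  */
  printf ("qnan != qnan: %d\n", qnan != qnan);
  return 0;
}
#endif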
2046 \f
2047 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2048 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2049
2050 When in doubt, return 0. */
2051
2052 static int
2053 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2054 {
2055 int unsignedp1, unsignedpo;
2056 tree primarg0, primarg1, primother;
2057 unsigned int correct_width;
2058
2059 if (operand_equal_p (arg0, arg1, 0))
2060 return 1;
2061
2062 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2063 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2064 return 0;
2065
2066 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2067 and see if the inner values are the same. This removes any
2068 signedness comparison, which doesn't matter here. */
2069 primarg0 = arg0, primarg1 = arg1;
2070 STRIP_NOPS (primarg0);
2071 STRIP_NOPS (primarg1);
2072 if (operand_equal_p (primarg0, primarg1, 0))
2073 return 1;
2074
2075 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2076 actual comparison operand, ARG0.
2077
2078 First throw away any conversions to wider types
2079 already present in the operands. */
2080
2081 primarg1 = get_narrower (arg1, &unsignedp1);
2082 primother = get_narrower (other, &unsignedpo);
2083
2084 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2085 if (unsignedp1 == unsignedpo
2086 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2087 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2088 {
2089 tree type = TREE_TYPE (arg0);
2090
2091 /* Make sure shorter operand is extended the right way
2092 to match the longer operand. */
2093 primarg1 = convert ((*lang_hooks.types.signed_or_unsigned_type)
2094 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2095
2096 if (operand_equal_p (arg0, convert (type, primarg1), 0))
2097 return 1;
2098 }
2099
2100 return 0;
2101 }
2102 \f
2103 /* See if ARG is an expression that is either a comparison or is performing
2104 arithmetic on comparisons. The comparisons must only be comparing
2105 two different values, which will be stored in *CVAL1 and *CVAL2; if
2106 they are nonzero it means that some operands have already been found.
2107 No variables may be used anywhere else in the expression except in the
2108 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2109 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2110
2111 If this is true, return 1. Otherwise, return zero. */
2112
2113 static int
2114 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2115 {
2116 enum tree_code code = TREE_CODE (arg);
2117 char class = TREE_CODE_CLASS (code);
2118
2119 /* We can handle some of the 'e' cases here. */
2120 if (class == 'e' && code == TRUTH_NOT_EXPR)
2121 class = '1';
2122 else if (class == 'e'
2123 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2124 || code == COMPOUND_EXPR))
2125 class = '2';
2126
2127 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
2128 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2129 {
2130 /* If we've already found a CVAL1 or CVAL2, this expression is
2131 too complex to handle. */
2132 if (*cval1 || *cval2)
2133 return 0;
2134
2135 class = '1';
2136 *save_p = 1;
2137 }
2138
2139 switch (class)
2140 {
2141 case '1':
2142 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2143
2144 case '2':
2145 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2146 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2147 cval1, cval2, save_p));
2148
2149 case 'c':
2150 return 1;
2151
2152 case 'e':
2153 if (code == COND_EXPR)
2154 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2155 cval1, cval2, save_p)
2156 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2157 cval1, cval2, save_p)
2158 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2159 cval1, cval2, save_p));
2160 return 0;
2161
2162 case '<':
2163 /* First see if we can handle the first operand, then the second. For
2164 the second operand, we know *CVAL1 can't be zero. It must be that
2165 one side of the comparison is each of the values; test for the
2166 case where this isn't true by failing if the two operands
2167 are the same. */
2168
2169 if (operand_equal_p (TREE_OPERAND (arg, 0),
2170 TREE_OPERAND (arg, 1), 0))
2171 return 0;
2172
2173 if (*cval1 == 0)
2174 *cval1 = TREE_OPERAND (arg, 0);
2175 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2176 ;
2177 else if (*cval2 == 0)
2178 *cval2 = TREE_OPERAND (arg, 0);
2179 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2180 ;
2181 else
2182 return 0;
2183
2184 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2185 ;
2186 else if (*cval2 == 0)
2187 *cval2 = TREE_OPERAND (arg, 1);
2188 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2189 ;
2190 else
2191 return 0;
2192
2193 return 1;
2194
2195 default:
2196 return 0;
2197 }
2198 }
2199 \f
2200 /* ARG is a tree that is known to contain just arithmetic operations and
2201 comparisons. Evaluate the operations in the tree substituting NEW0 for
2202 any occurrence of OLD0 as an operand of a comparison and likewise for
2203 NEW1 and OLD1. */
2204
2205 static tree
2206 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2207 {
2208 tree type = TREE_TYPE (arg);
2209 enum tree_code code = TREE_CODE (arg);
2210 char class = TREE_CODE_CLASS (code);
2211
2212 /* We can handle some of the 'e' cases here. */
2213 if (class == 'e' && code == TRUTH_NOT_EXPR)
2214 class = '1';
2215 else if (class == 'e'
2216 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2217 class = '2';
2218
2219 switch (class)
2220 {
2221 case '1':
2222 return fold (build1 (code, type,
2223 eval_subst (TREE_OPERAND (arg, 0),
2224 old0, new0, old1, new1)));
2225
2226 case '2':
2227 return fold (build (code, type,
2228 eval_subst (TREE_OPERAND (arg, 0),
2229 old0, new0, old1, new1),
2230 eval_subst (TREE_OPERAND (arg, 1),
2231 old0, new0, old1, new1)));
2232
2233 case 'e':
2234 switch (code)
2235 {
2236 case SAVE_EXPR:
2237 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2238
2239 case COMPOUND_EXPR:
2240 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2241
2242 case COND_EXPR:
2243 return fold (build (code, type,
2244 eval_subst (TREE_OPERAND (arg, 0),
2245 old0, new0, old1, new1),
2246 eval_subst (TREE_OPERAND (arg, 1),
2247 old0, new0, old1, new1),
2248 eval_subst (TREE_OPERAND (arg, 2),
2249 old0, new0, old1, new1)));
2250 default:
2251 break;
2252 }
2253 /* Fall through - ??? */
2254
2255 case '<':
2256 {
2257 tree arg0 = TREE_OPERAND (arg, 0);
2258 tree arg1 = TREE_OPERAND (arg, 1);
2259
2260 /* We need to check both for exact equality and tree equality. The
2261 former will be true if the operand has a side-effect. In that
2262 case, we know the operand occurred exactly once. */
2263
2264 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2265 arg0 = new0;
2266 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2267 arg0 = new1;
2268
2269 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2270 arg1 = new0;
2271 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2272 arg1 = new1;
2273
2274 return fold (build (code, type, arg0, arg1));
2275 }
2276
2277 default:
2278 return arg;
2279 }
2280 }
2281 \f
2282 /* Return a tree for the case when the result of an expression is RESULT
2283 converted to TYPE and OMITTED was previously an operand of the expression
2284 but is now not needed (e.g., we folded OMITTED * 0).
2285
2286 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2287 the conversion of RESULT to TYPE. */
2288
2289 tree
2290 omit_one_operand (tree type, tree result, tree omitted)
2291 {
2292 tree t = convert (type, result);
2293
2294 if (TREE_SIDE_EFFECTS (omitted))
2295 return build (COMPOUND_EXPR, type, omitted, t);
2296
2297 return non_lvalue (t);
2298 }
2299
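/* Editor's illustrative sketch, not part of this file: the C-level
   analogue of what omit_one_operand builds.  The comma operator plays
   the role of COMPOUND_EXPR, keeping the side effect of the omitted
   operand while the folded result is used.  */
#if 0
#include <stdio.h>

static int
noisy (void)
{
  puts ("side effect");
  return 42;
}

int
main (void)
{
  /* Folding noisy () * 0 straight to 0 would lose the call;
     (noisy (), 0) keeps it and still yields 0.  */
  int r = (noisy (), 0);
  printf ("r = %d\n", r);
  return 0;
}
#endif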
2300 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2301
2302 static tree
2303 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2304 {
2305 tree t = convert (type, result);
2306
2307 if (TREE_SIDE_EFFECTS (omitted))
2308 return build (COMPOUND_EXPR, type, omitted, t);
2309
2310 return pedantic_non_lvalue (t);
2311 }
2312 \f
2313 /* Return a simplified tree node for the truth-negation of ARG. This
2314 never alters ARG itself. We assume that ARG is an operation that
2315 returns a truth value (0 or 1). */
2316
2317 tree
2318 invert_truthvalue (tree arg)
2319 {
2320 tree type = TREE_TYPE (arg);
2321 enum tree_code code = TREE_CODE (arg);
2322
2323 if (code == ERROR_MARK)
2324 return arg;
2325
2326 /* If this is a comparison, we can simply invert it, except for
2327 floating-point non-equality comparisons, in which case we just
2328 enclose a TRUTH_NOT_EXPR around what we have. */
2329
2330 if (TREE_CODE_CLASS (code) == '<')
2331 {
2332 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2333 && !flag_unsafe_math_optimizations
2334 && code != NE_EXPR
2335 && code != EQ_EXPR)
2336 return build1 (TRUTH_NOT_EXPR, type, arg);
2337 else
2338 return build (invert_tree_comparison (code), type,
2339 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2340 }
2341
2342 switch (code)
2343 {
2344 case INTEGER_CST:
2345 return convert (type, build_int_2 (integer_zerop (arg), 0));
2346
2347 case TRUTH_AND_EXPR:
2348 return build (TRUTH_OR_EXPR, type,
2349 invert_truthvalue (TREE_OPERAND (arg, 0)),
2350 invert_truthvalue (TREE_OPERAND (arg, 1)));
2351
2352 case TRUTH_OR_EXPR:
2353 return build (TRUTH_AND_EXPR, type,
2354 invert_truthvalue (TREE_OPERAND (arg, 0)),
2355 invert_truthvalue (TREE_OPERAND (arg, 1)));
2356
2357 case TRUTH_XOR_EXPR:
2358 /* Here we can invert either operand. We invert the first operand
2359 unless the second operand is a TRUTH_NOT_EXPR in which case our
2360 result is the XOR of the first operand with the inside of the
2361 negation of the second operand. */
2362
2363 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2364 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2365 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2366 else
2367 return build (TRUTH_XOR_EXPR, type,
2368 invert_truthvalue (TREE_OPERAND (arg, 0)),
2369 TREE_OPERAND (arg, 1));
2370
2371 case TRUTH_ANDIF_EXPR:
2372 return build (TRUTH_ORIF_EXPR, type,
2373 invert_truthvalue (TREE_OPERAND (arg, 0)),
2374 invert_truthvalue (TREE_OPERAND (arg, 1)));
2375
2376 case TRUTH_ORIF_EXPR:
2377 return build (TRUTH_ANDIF_EXPR, type,
2378 invert_truthvalue (TREE_OPERAND (arg, 0)),
2379 invert_truthvalue (TREE_OPERAND (arg, 1)));
2380
2381 case TRUTH_NOT_EXPR:
2382 return TREE_OPERAND (arg, 0);
2383
2384 case COND_EXPR:
2385 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2386 invert_truthvalue (TREE_OPERAND (arg, 1)),
2387 invert_truthvalue (TREE_OPERAND (arg, 2)));
2388
2389 case COMPOUND_EXPR:
2390 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2391 invert_truthvalue (TREE_OPERAND (arg, 1)));
2392
2393 case WITH_RECORD_EXPR:
2394 return build (WITH_RECORD_EXPR, type,
2395 invert_truthvalue (TREE_OPERAND (arg, 0)),
2396 TREE_OPERAND (arg, 1));
2397
2398 case NON_LVALUE_EXPR:
2399 return invert_truthvalue (TREE_OPERAND (arg, 0));
2400
2401 case NOP_EXPR:
2402 case CONVERT_EXPR:
2403 case FLOAT_EXPR:
2404 return build1 (TREE_CODE (arg), type,
2405 invert_truthvalue (TREE_OPERAND (arg, 0)));
2406
2407 case BIT_AND_EXPR:
2408 if (!integer_onep (TREE_OPERAND (arg, 1)))
2409 break;
2410 return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));
2411
2412 case SAVE_EXPR:
2413 return build1 (TRUTH_NOT_EXPR, type, arg);
2414
2415 case CLEANUP_POINT_EXPR:
2416 return build1 (CLEANUP_POINT_EXPR, type,
2417 invert_truthvalue (TREE_OPERAND (arg, 0)));
2418
2419 default:
2420 break;
2421 }
2422 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2423 abort ();
2424 return build1 (TRUTH_NOT_EXPR, type, arg);
2425 }
2426
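/* Editor's illustrative sketch, not part of this file: an exhaustive
   check of the De Morgan rewrites invert_truthvalue applies to
   TRUTH_AND_EXPR and TRUTH_OR_EXPR, over all boolean inputs.  */
#if 0
#include <stdio.h>

int
main (void)
{
  int a, b, ok = 1;

  for (a = 0; a <= 1; a++)
    for (b = 0; b <= 1; b++)
      if (!(a && b) != (!a || !b) || !(a || b) != (!a && !b))
        ok = 0;
  puts (ok ? "ok" : "mismatch");
  return 0;
}
#endif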
2427 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2428 operands are another bit-wise operation with a common input. If so,
2429 distribute the bit operations to save an operation and possibly two if
2430 constants are involved. For example, convert
2431 (A | B) & (A | C) into A | (B & C)
2432 Further simplification will occur if B and C are constants.
2433
2434 If this optimization cannot be done, 0 will be returned. */
2435
2436 static tree
2437 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
2438 {
2439 tree common;
2440 tree left, right;
2441
2442 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2443 || TREE_CODE (arg0) == code
2444 || (TREE_CODE (arg0) != BIT_AND_EXPR
2445 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2446 return 0;
2447
2448 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2449 {
2450 common = TREE_OPERAND (arg0, 0);
2451 left = TREE_OPERAND (arg0, 1);
2452 right = TREE_OPERAND (arg1, 1);
2453 }
2454 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2455 {
2456 common = TREE_OPERAND (arg0, 0);
2457 left = TREE_OPERAND (arg0, 1);
2458 right = TREE_OPERAND (arg1, 0);
2459 }
2460 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2461 {
2462 common = TREE_OPERAND (arg0, 1);
2463 left = TREE_OPERAND (arg0, 0);
2464 right = TREE_OPERAND (arg1, 1);
2465 }
2466 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2467 {
2468 common = TREE_OPERAND (arg0, 1);
2469 left = TREE_OPERAND (arg0, 0);
2470 right = TREE_OPERAND (arg1, 0);
2471 }
2472 else
2473 return 0;
2474
2475 return fold (build (TREE_CODE (arg0), type, common,
2476 fold (build (code, type, left, right))));
2477 }
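/* Editor's illustrative sketch, not part of this file: a spot check
   of the distributive identities applied above, on arbitrary bit
   patterns.  Both directions hold for any values.  */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned a = 0x5a5au, b = 0x1234u, c = 0xff00u;

  printf ("(A|B)&(A|C) == A|(B&C): %d\n",
          ((a | b) & (a | c)) == (a | (b & c)));
  printf ("(A&B)|(A&C) == A&(B|C): %d\n",
          ((a & b) | (a & c)) == (a & (b | c)));
  return 0;
}
#endif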
2478 \f
2479 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2480 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
2481
2482 static tree
2483 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
2484 int unsignedp)
2485 {
2486 tree result = build (BIT_FIELD_REF, type, inner,
2487 size_int (bitsize), bitsize_int (bitpos));
2488
2489 TREE_UNSIGNED (result) = unsignedp;
2490
2491 return result;
2492 }
2493
2494 /* Optimize a bit-field compare.
2495
2496 There are two cases: First is a compare against a constant and the
2497 second is a comparison of two items where the fields are at the same
2498 bit position relative to the start of a chunk (byte, halfword, word)
2499 large enough to contain it. In these cases we can avoid the shift
2500 implicit in bitfield extractions.
2501
2502 For constants, we emit a compare of the shifted constant with the
2503 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2504 compared. For two fields at the same position, we do the ANDs with the
2505 similar mask and compare the result of the ANDs.
2506
2507 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2508 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2509 are the left and right operands of the comparison, respectively.
2510
2511 If the optimization described above can be done, we return the resulting
2512 tree. Otherwise we return zero. */
2513
2514 static tree
2515 optimize_bit_field_compare (enum tree_code code, tree compare_type,
2516 tree lhs, tree rhs)
2517 {
2518 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
2519 tree type = TREE_TYPE (lhs);
2520 tree signed_type, unsigned_type;
2521 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2522 enum machine_mode lmode, rmode, nmode;
2523 int lunsignedp, runsignedp;
2524 int lvolatilep = 0, rvolatilep = 0;
2525 tree linner, rinner = NULL_TREE;
2526 tree mask;
2527 tree offset;
2528
2529 /* Get all the information about the extractions being done. If the bit size
2530 is the same as the size of the underlying object, we aren't doing an
2531 extraction at all and so can do nothing. We also don't want to
2532 do anything if the inner expression is a PLACEHOLDER_EXPR since we
2533 then will no longer be able to replace it. */
2534 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2535 &lunsignedp, &lvolatilep);
2536 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2537 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
2538 return 0;
2539
2540 if (!const_p)
2541 {
2542 /* If this is not a constant, we can only do something if bit positions,
2543 sizes, and signedness are the same. */
2544 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
2545 &runsignedp, &rvolatilep);
2546
2547 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2548 || lunsignedp != runsignedp || offset != 0
2549 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
2550 return 0;
2551 }
2552
2553 /* See if we can find a mode to refer to this field. We should be able to,
2554 but fail if we can't. */
2555 nmode = get_best_mode (lbitsize, lbitpos,
2556 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
2557 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
2558 TYPE_ALIGN (TREE_TYPE (rinner))),
2559 word_mode, lvolatilep || rvolatilep);
2560 if (nmode == VOIDmode)
2561 return 0;
2562
2563 /* Set signed and unsigned types of the precision of this mode for the
2564 shifts below. */
2565 signed_type = (*lang_hooks.types.type_for_mode) (nmode, 0);
2566 unsigned_type = (*lang_hooks.types.type_for_mode) (nmode, 1);
2567
2568 /* Compute the bit position and size for the new reference and our offset
2569 within it. If the new reference is the same size as the original, we
2570 won't optimize anything, so return zero. */
2571 nbitsize = GET_MODE_BITSIZE (nmode);
2572 nbitpos = lbitpos & ~ (nbitsize - 1);
2573 lbitpos -= nbitpos;
2574 if (nbitsize == lbitsize)
2575 return 0;
2576
2577 if (BYTES_BIG_ENDIAN)
2578 lbitpos = nbitsize - lbitsize - lbitpos;
2579
2580 /* Make the mask to be used against the extracted field. */
2581 mask = build_int_2 (~0, ~0);
2582 TREE_TYPE (mask) = unsigned_type;
2583 force_fit_type (mask, 0);
2584 mask = convert (unsigned_type, mask);
2585 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
2586 mask = const_binop (RSHIFT_EXPR, mask,
2587 size_int (nbitsize - lbitsize - lbitpos), 0);
2588
2589 if (! const_p)
2590 /* If not comparing with constant, just rework the comparison
2591 and return. */
2592 return build (code, compare_type,
2593 build (BIT_AND_EXPR, unsigned_type,
2594 make_bit_field_ref (linner, unsigned_type,
2595 nbitsize, nbitpos, 1),
2596 mask),
2597 build (BIT_AND_EXPR, unsigned_type,
2598 make_bit_field_ref (rinner, unsigned_type,
2599 nbitsize, nbitpos, 1),
2600 mask));
2601
2602 /* Otherwise, we are handling the constant case. See if the constant is too
2603 big for the field. Warn and return a tree for 0 (false) if so. We do
2604 this not only for its own sake, but to avoid having to test for this
2605 error case below. If we didn't, we might generate wrong code.
2606
2607 For unsigned fields, the constant shifted right by the field length should
2608 be all zero. For signed fields, the high-order bits should agree with
2609 the sign bit. */
2610
2611 if (lunsignedp)
2612 {
2613 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2614 convert (unsigned_type, rhs),
2615 size_int (lbitsize), 0)))
2616 {
2617 warning ("comparison is always %d due to width of bit-field",
2618 code == NE_EXPR);
2619 return convert (compare_type,
2620 (code == NE_EXPR
2621 ? integer_one_node : integer_zero_node));
2622 }
2623 }
2624 else
2625 {
2626 tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
2627 size_int (lbitsize - 1), 0);
2628 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2629 {
2630 warning ("comparison is always %d due to width of bit-field",
2631 code == NE_EXPR);
2632 return convert (compare_type,
2633 (code == NE_EXPR
2634 ? integer_one_node : integer_zero_node));
2635 }
2636 }
2637
2638 /* Single-bit compares should always be against zero. */
2639 if (lbitsize == 1 && ! integer_zerop (rhs))
2640 {
2641 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2642 rhs = convert (type, integer_zero_node);
2643 }
2644
2645 /* Make a new bitfield reference, shift the constant over the
2646 appropriate number of bits and mask it with the computed mask
2647 (in case this was a signed field). If we changed it, make a new one. */
2648 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
2649 if (lvolatilep)
2650 {
2651 TREE_SIDE_EFFECTS (lhs) = 1;
2652 TREE_THIS_VOLATILE (lhs) = 1;
2653 }
2654
2655 rhs = fold (const_binop (BIT_AND_EXPR,
2656 const_binop (LSHIFT_EXPR,
2657 convert (unsigned_type, rhs),
2658 size_int (lbitpos), 0),
2659 mask, 0));
2660
2661 return build (code, compare_type,
2662 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
2663 rhs);
2664 }
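/* Editor's illustrative sketch, not part of this file: the mask
   construction used above, on a plain 32-bit word instead of trees.
   Shifting all-ones left and then right leaves exactly LBITSIZE one
   bits positioned at LBITPOS.  */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned nbitsize = 32, lbitsize = 5, lbitpos = 9;
  unsigned mask = ~0u;

  mask <<= nbitsize - lbitsize;            /* keep lbitsize ones */
  mask >>= nbitsize - lbitsize - lbitpos;  /* place them at lbitpos */
  printf ("mask = 0x%08x\n", mask);        /* 0x00003e00 */
  return 0;
}
#endif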
2665 \f
2666 /* Subroutine for fold_truthop: decode a field reference.
2667
2668 If EXP is a comparison reference, we return the innermost reference.
2669
2670 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2671 set to the starting bit number.
2672
2673 If the innermost field can be completely contained in a mode-sized
2674 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
2675
2676 *PVOLATILEP is set to 1 if any expression encountered is volatile;
2677 otherwise it is not changed.
2678
2679 *PUNSIGNEDP is set to the signedness of the field.
2680
2681 *PMASK is set to the mask used. This is either contained in a
2682 BIT_AND_EXPR or derived from the width of the field.
2683
2684 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
2685
2686 Return 0 if this is not a component reference or is one that we can't
2687 do anything with. */
2688
2689 static tree
2690 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
2691 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
2692 int *punsignedp, int *pvolatilep,
2693 tree *pmask, tree *pand_mask)
2694 {
2695 tree outer_type = 0;
2696 tree and_mask = 0;
2697 tree mask, inner, offset;
2698 tree unsigned_type;
2699 unsigned int precision;
2700
2701 /* All the optimizations using this function assume integer fields.
2702 There are problems with FP fields since the type_for_size call
2703 below can fail for, e.g., XFmode. */
2704 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
2705 return 0;
2706
2707 /* We are interested in the bare arrangement of bits, so strip everything
2708 that doesn't affect the machine mode. However, record the type of the
2709 outermost expression if it may matter below. */
2710 if (TREE_CODE (exp) == NOP_EXPR
2711 || TREE_CODE (exp) == CONVERT_EXPR
2712 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2713 outer_type = TREE_TYPE (exp);
2714 STRIP_NOPS (exp);
2715
2716 if (TREE_CODE (exp) == BIT_AND_EXPR)
2717 {
2718 and_mask = TREE_OPERAND (exp, 1);
2719 exp = TREE_OPERAND (exp, 0);
2720 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
2721 if (TREE_CODE (and_mask) != INTEGER_CST)
2722 return 0;
2723 }
2724
2725 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
2726 punsignedp, pvolatilep);
2727 if ((inner == exp && and_mask == 0)
2728 || *pbitsize < 0 || offset != 0
2729 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
2730 return 0;
2731
2732 /* If the number of bits in the reference is the same as the bitsize of
2733 the outer type, then the outer type gives the signedness. Otherwise
2734 (in case of a small bitfield) the signedness is unchanged. */
2735 if (outer_type && *pbitsize == tree_low_cst (TYPE_SIZE (outer_type), 1))
2736 *punsignedp = TREE_UNSIGNED (outer_type);
2737
2738 /* Compute the mask to access the bitfield. */
2739 unsigned_type = (*lang_hooks.types.type_for_size) (*pbitsize, 1);
2740 precision = TYPE_PRECISION (unsigned_type);
2741
2742 mask = build_int_2 (~0, ~0);
2743 TREE_TYPE (mask) = unsigned_type;
2744 force_fit_type (mask, 0);
2745 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2746 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2747
2748 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
2749 if (and_mask != 0)
2750 mask = fold (build (BIT_AND_EXPR, unsigned_type,
2751 convert (unsigned_type, and_mask), mask));
2752
2753 *pmask = mask;
2754 *pand_mask = and_mask;
2755 return inner;
2756 }
2757
2758 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
2759 bit positions. */
2760
2761 static int
2762 all_ones_mask_p (tree mask, int size)
2763 {
2764 tree type = TREE_TYPE (mask);
2765 unsigned int precision = TYPE_PRECISION (type);
2766 tree tmask;
2767
2768 tmask = build_int_2 (~0, ~0);
2769 TREE_TYPE (tmask) = (*lang_hooks.types.signed_type) (type);
2770 force_fit_type (tmask, 0);
2771 return
2772 tree_int_cst_equal (mask,
2773 const_binop (RSHIFT_EXPR,
2774 const_binop (LSHIFT_EXPR, tmask,
2775 size_int (precision - size),
2776 0),
2777 size_int (precision - size), 0));
2778 }
2779
2780 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
2781 represents the sign bit of EXP's type. If EXP represents a sign
2782 or zero extension, also test VAL against the unextended type.
2783 The return value is the (sub)expression whose sign bit is VAL,
2784 or NULL_TREE otherwise. */
2785
2786 static tree
2787 sign_bit_p (tree exp, tree val)
2788 {
2789 unsigned HOST_WIDE_INT mask_lo, lo;
2790 HOST_WIDE_INT mask_hi, hi;
2791 int width;
2792 tree t;
2793
2794 /* Tree EXP must have an integral type. */
2795 t = TREE_TYPE (exp);
2796 if (! INTEGRAL_TYPE_P (t))
2797 return NULL_TREE;
2798
2799 /* Tree VAL must be an integer constant. */
2800 if (TREE_CODE (val) != INTEGER_CST
2801 || TREE_CONSTANT_OVERFLOW (val))
2802 return NULL_TREE;
2803
2804 width = TYPE_PRECISION (t);
2805 if (width > HOST_BITS_PER_WIDE_INT)
2806 {
2807 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
2808 lo = 0;
2809
2810 mask_hi = ((unsigned HOST_WIDE_INT) -1
2811 >> (2 * HOST_BITS_PER_WIDE_INT - width));
2812 mask_lo = -1;
2813 }
2814 else
2815 {
2816 hi = 0;
2817 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
2818
2819 mask_hi = 0;
2820 mask_lo = ((unsigned HOST_WIDE_INT) -1
2821 >> (HOST_BITS_PER_WIDE_INT - width));
2822 }
2823
2824 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
2825 treat VAL as if it were unsigned. */
2826 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
2827 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
2828 return exp;
2829
2830 /* Handle extension from a narrower type. */
2831 if (TREE_CODE (exp) == NOP_EXPR
2832 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
2833 return sign_bit_p (TREE_OPERAND (exp, 0), val);
2834
2835 return NULL_TREE;
2836 }
2837
2838 /* Subroutine for fold_truthop: determine if an operand is simple enough
2839 to be evaluated unconditionally. */
2840
2841 static int
2842 simple_operand_p (tree exp)
2843 {
2844 /* Strip any conversions that don't change the machine mode. */
2845 while ((TREE_CODE (exp) == NOP_EXPR
2846 || TREE_CODE (exp) == CONVERT_EXPR)
2847 && (TYPE_MODE (TREE_TYPE (exp))
2848 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
2849 exp = TREE_OPERAND (exp, 0);
2850
2851 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
2852 || (DECL_P (exp)
2853 && ! TREE_ADDRESSABLE (exp)
2854 && ! TREE_THIS_VOLATILE (exp)
2855 && ! DECL_NONLOCAL (exp)
2856 /* Don't regard global variables as simple. They may be
2857 allocated in ways unknown to the compiler (shared memory,
2858 #pragma weak, etc). */
2859 && ! TREE_PUBLIC (exp)
2860 && ! DECL_EXTERNAL (exp)
2861 /* Loading a static variable is unduly expensive, but global
2862 registers aren't expensive. */
2863 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
2864 }
2865 \f
2866 /* The following functions are subroutines to fold_range_test and allow it to
2867 try to change a logical combination of comparisons into a range test.
2868
2869 For example, both
2870 X == 2 || X == 3 || X == 4 || X == 5
2871 and
2872 X >= 2 && X <= 5
2873 are converted to
2874 (unsigned) (X - 2) <= 3
2875
2876 We describe each set of comparisons as being either inside or outside
2877 a range, using a variable named like IN_P, and then describe the
2878 range with a lower and upper bound. If one of the bounds is omitted,
2879 it represents either the highest or lowest value of the type.
2880
2881 In the comments below, we represent a range by two numbers in brackets
2882 preceded by a "+" to designate being inside that range, or a "-" to
2883 designate being outside that range, so the condition can be inverted by
2884 flipping the prefix. An omitted bound is represented by a "-". For
2885 example, "- [-, 10]" means being outside the range starting at the lowest
2886 possible value and ending at 10, in other words, being greater than 10.
2887 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
2888 always false.
2889
2890 We set up things so that the missing bounds are handled in a consistent
2891 manner so neither a missing bound nor "true" and "false" need to be
2892 handled using a special case. */
2893
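/* Editor's illustrative sketch, not part of this file: verifying the
   transformation quoted above.  The unsigned subtract-and-compare
   form agrees with the chained comparisons for every input, negatives
   included, because values below 2 wrap around to huge unsigned
   numbers.  */
#if 0
#include <stdio.h>

int
main (void)
{
  int x, ok = 1;

  for (x = -1000; x <= 1000; x++)
    {
      int slow = (x >= 2 && x <= 5);
      int fast = (unsigned) (x - 2) <= 3;
      if (slow != fast)
        ok = 0;
    }
  puts (ok ? "ok" : "mismatch");
  return 0;
}
#endif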
2894 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
2895 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
2896 and UPPER1_P are nonzero if the respective argument is an upper bound
2897 and zero for a lower. TYPE, if nonzero, is the type of the result; it
2898 must be specified for a comparison. ARG1 will be converted to ARG0's
2899 type if both are specified. */
2900
2901 static tree
2902 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
2903 tree arg1, int upper1_p)
2904 {
2905 tree tem;
2906 int result;
2907 int sgn0, sgn1;
2908
2909 /* If neither arg represents infinity, do the normal operation.
2910 Else, if not a comparison, return infinity. Else handle the special
2911 comparison rules. Note that most of the cases below won't occur, but
2912 are handled for consistency. */
2913
2914 if (arg0 != 0 && arg1 != 0)
2915 {
2916 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
2917 arg0, convert (TREE_TYPE (arg0), arg1)));
2918 STRIP_NOPS (tem);
2919 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
2920 }
2921
2922 if (TREE_CODE_CLASS (code) != '<')
2923 return 0;
2924
2925 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
2926 for neither. In real maths, we cannot assume open-ended ranges are
2927 the same. But this is computer arithmetic, where numbers are finite.
2928 We can therefore stand in for any unbounded end of a range with a
2929 value Z greater than any representable number, which permits us to
2930 treat unbounded ranges as equal.
2931 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
2932 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
2933 switch (code)
2934 {
2935 case EQ_EXPR:
2936 result = sgn0 == sgn1;
2937 break;
2938 case NE_EXPR:
2939 result = sgn0 != sgn1;
2940 break;
2941 case LT_EXPR:
2942 result = sgn0 < sgn1;
2943 break;
2944 case LE_EXPR:
2945 result = sgn0 <= sgn1;
2946 break;
2947 case GT_EXPR:
2948 result = sgn0 > sgn1;
2949 break;
2950 case GE_EXPR:
2951 result = sgn0 >= sgn1;
2952 break;
2953 default:
2954 abort ();
2955 }
2956
2957 return convert (type, result ? integer_one_node : integer_zero_node);
2958 }
2959 \f
2960 /* Given EXP, a logical expression, set the range it is testing into
2961 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
2962 actually being tested. *PLOW and *PHIGH will be made of the same type
2963 as the returned expression. If EXP is not a comparison, we will most
2964 likely not be returning a useful value and range. */
2965
2966 static tree
2967 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
2968 {
2969 enum tree_code code;
2970 tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
2971 tree orig_type = NULL_TREE;
2972 int in_p, n_in_p;
2973 tree low, high, n_low, n_high;
2974
2975 /* Start with simply saying "EXP != 0" and then look at the code of EXP
2976 and see if we can refine the range. Some of the cases below may not
2977 happen, but it doesn't seem worth worrying about this. We "continue"
2978 the outer loop when we've changed something; otherwise we "break"
2979 the switch, which will "break" the while. */
2980
2981 in_p = 0, low = high = convert (TREE_TYPE (exp), integer_zero_node);
2982
2983 while (1)
2984 {
2985 code = TREE_CODE (exp);
2986
2987 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
2988 {
2989 if (first_rtl_op (code) > 0)
2990 arg0 = TREE_OPERAND (exp, 0);
2991 if (TREE_CODE_CLASS (code) == '<'
2992 || TREE_CODE_CLASS (code) == '1'
2993 || TREE_CODE_CLASS (code) == '2')
2994 type = TREE_TYPE (arg0);
2995 if (TREE_CODE_CLASS (code) == '2'
2996 || TREE_CODE_CLASS (code) == '<'
2997 || (TREE_CODE_CLASS (code) == 'e'
2998 && TREE_CODE_LENGTH (code) > 1))
2999 arg1 = TREE_OPERAND (exp, 1);
3000 }
3001
3002 /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
3003 lose a cast by accident. */
3004 if (type != NULL_TREE && orig_type == NULL_TREE)
3005 orig_type = type;
3006
3007 switch (code)
3008 {
3009 case TRUTH_NOT_EXPR:
3010 in_p = ! in_p, exp = arg0;
3011 continue;
3012
3013 case EQ_EXPR: case NE_EXPR:
3014 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3015 /* We can only do something if the range is testing for zero
3016 and if the second operand is an integer constant. Note that
3017 saying something is "in" the range we make is done by
3018 complementing IN_P since it will set in the initial case of
3019 being not equal to zero; "out" is leaving it alone. */
3020 if (low == 0 || high == 0
3021 || ! integer_zerop (low) || ! integer_zerop (high)
3022 || TREE_CODE (arg1) != INTEGER_CST)
3023 break;
3024
3025 switch (code)
3026 {
3027 case NE_EXPR: /* - [c, c] */
3028 low = high = arg1;
3029 break;
3030 case EQ_EXPR: /* + [c, c] */
3031 in_p = ! in_p, low = high = arg1;
3032 break;
3033 case GT_EXPR: /* - [-, c] */
3034 low = 0, high = arg1;
3035 break;
3036 case GE_EXPR: /* + [c, -] */
3037 in_p = ! in_p, low = arg1, high = 0;
3038 break;
3039 case LT_EXPR: /* - [c, -] */
3040 low = arg1, high = 0;
3041 break;
3042 case LE_EXPR: /* + [-, c] */
3043 in_p = ! in_p, low = 0, high = arg1;
3044 break;
3045 default:
3046 abort ();
3047 }
3048
3049 exp = arg0;
3050
3051 /* If this is an unsigned comparison, we also know that EXP is
3052 greater than or equal to zero. We base the range tests we make
3053 on that fact, so we record it here so we can parse existing
3054 range tests. */
3055 if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
3056 {
3057 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
3058 1, convert (type, integer_zero_node),
3059 NULL_TREE))
3060 break;
3061
3062 in_p = n_in_p, low = n_low, high = n_high;
3063
3064 /* If the high bound is missing, but we
3065 have a low bound, reverse the range so
3066 it goes from zero to the low bound minus 1. */
3067 if (high == 0 && low)
3068 {
3069 in_p = ! in_p;
3070 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3071 integer_one_node, 0);
3072 low = convert (type, integer_zero_node);
3073 }
3074 }
3075 continue;
3076
3077 case NEGATE_EXPR:
3078 /* (-x) IN [a,b] -> x in [-b, -a] */
3079 n_low = range_binop (MINUS_EXPR, type,
3080 convert (type, integer_zero_node), 0, high, 1);
3081 n_high = range_binop (MINUS_EXPR, type,
3082 convert (type, integer_zero_node), 0, low, 0);
3083 low = n_low, high = n_high;
3084 exp = arg0;
3085 continue;
3086
3087 case BIT_NOT_EXPR:
3088 /* ~ X -> -X - 1 */
3089 exp = build (MINUS_EXPR, type, negate_expr (arg0),
3090 convert (type, integer_one_node));
3091 continue;
3092
3093 case PLUS_EXPR: case MINUS_EXPR:
3094 if (TREE_CODE (arg1) != INTEGER_CST)
3095 break;
3096
3097 /* If EXP is signed, any overflow in the computation is undefined,
3098 so we don't worry about it so long as our computations on
3099 the bounds don't overflow. For unsigned, overflow is defined
3100 and this is exactly the right thing. */
3101 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3102 type, low, 0, arg1, 0);
3103 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3104 type, high, 1, arg1, 0);
3105 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3106 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3107 break;
3108
3109 /* Check for an unsigned range which has wrapped around the maximum
3110 value thus making n_high < n_low, and normalize it. */
3111 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3112 {
3113 low = range_binop (PLUS_EXPR, type, n_high, 0,
3114 integer_one_node, 0);
3115 high = range_binop (MINUS_EXPR, type, n_low, 0,
3116 integer_one_node, 0);
3117
3118 /* If the range is of the form +/- [ x+1, x ], we won't
3119 be able to normalize it. But then, it represents the
3120 whole range or the empty set, so make it
3121 +/- [ -, - ]. */
3122 if (tree_int_cst_equal (n_low, low)
3123 && tree_int_cst_equal (n_high, high))
3124 low = high = 0;
3125 else
3126 in_p = ! in_p;
3127 }
3128 else
3129 low = n_low, high = n_high;
3130
3131 exp = arg0;
3132 continue;
3133
3134 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3135 if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
3136 break;
3137
3138 if (! INTEGRAL_TYPE_P (type)
3139 || (low != 0 && ! int_fits_type_p (low, type))
3140 || (high != 0 && ! int_fits_type_p (high, type)))
3141 break;
3142
3143 n_low = low, n_high = high;
3144
3145 if (n_low != 0)
3146 n_low = convert (type, n_low);
3147
3148 if (n_high != 0)
3149 n_high = convert (type, n_high);
3150
3151 /* If we're converting from an unsigned to a signed type,
3152 we will be doing the comparison as unsigned. The tests above
3153 have already verified that LOW and HIGH are both positive.
3154
3155 So we have to make sure that the original unsigned value will
3156 be interpreted as positive. */
3157 if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
3158 {
3159 tree equiv_type = (*lang_hooks.types.type_for_mode)
3160 (TYPE_MODE (type), 1);
3161 tree high_positive;
3162
3163 /* A range without an upper bound is, naturally, unbounded.
3164 Since convert would have cropped a very large value, use
3165 the max value for the destination type. */
3166 high_positive
3167 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3168 : TYPE_MAX_VALUE (type);
3169
3170 if (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (exp)))
3171 high_positive = fold (build (RSHIFT_EXPR, type,
3172 convert (type, high_positive),
3173 convert (type, integer_one_node)));
3174
3175 /* If the low bound is specified, "and" the range with the
3176 range for which the original unsigned value will be
3177 positive. */
3178 if (low != 0)
3179 {
3180 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3181 1, n_low, n_high,
3182 1, convert (type, integer_zero_node),
3183 high_positive))
3184 break;
3185
3186 in_p = (n_in_p == in_p);
3187 }
3188 else
3189 {
3190 /* Otherwise, "or" the range with the range of the input
3191 that will be interpreted as negative. */
3192 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3193 0, n_low, n_high,
3194 1, convert (type, integer_zero_node),
3195 high_positive))
3196 break;
3197
3198 in_p = (in_p != n_in_p);
3199 }
3200 }
3201
3202 exp = arg0;
3203 low = n_low, high = n_high;
3204 continue;
3205
3206 default:
3207 break;
3208 }
3209
3210 break;
3211 }
3212
3213 /* If EXP is a constant, we can evaluate whether this is true or false. */
3214 if (TREE_CODE (exp) == INTEGER_CST)
3215 {
3216 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3217 exp, 0, low, 0))
3218 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3219 exp, 1, high, 1)));
3220 low = high = 0;
3221 exp = 0;
3222 }
3223
3224 *pin_p = in_p, *plow = low, *phigh = high;
3225 return exp;
3226 }
3227 \f
3228 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3229 type, TYPE, return an expression to test if EXP is in (or out of, depending
3230 on IN_P) the range. */
3231
3232 static tree
3233 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3234 {
3235 tree etype = TREE_TYPE (exp);
3236 tree value;
3237
3238 if (! in_p
3239 && (0 != (value = build_range_check (type, exp, 1, low, high))))
3240 return invert_truthvalue (value);
3241
3242 if (low == 0 && high == 0)
3243 return convert (type, integer_one_node);
3244
3245 if (low == 0)
3246 return fold (build (LE_EXPR, type, exp, high));
3247
3248 if (high == 0)
3249 return fold (build (GE_EXPR, type, exp, low));
3250
3251 if (operand_equal_p (low, high, 0))
3252 return fold (build (EQ_EXPR, type, exp, low));
3253
3254 if (integer_zerop (low))
3255 {
3256 if (! TREE_UNSIGNED (etype))
3257 {
3258 etype = (*lang_hooks.types.unsigned_type) (etype);
3259 high = convert (etype, high);
3260 exp = convert (etype, exp);
3261 }
3262 return build_range_check (type, exp, 1, 0, high);
3263 }
3264
3265 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3266 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3267 {
3268 unsigned HOST_WIDE_INT lo;
3269 HOST_WIDE_INT hi;
3270 int prec;
3271
3272 prec = TYPE_PRECISION (etype);
3273 if (prec <= HOST_BITS_PER_WIDE_INT)
3274 {
3275 hi = 0;
3276 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3277 }
3278 else
3279 {
3280 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3281 lo = (unsigned HOST_WIDE_INT) -1;
3282 }
3283
3284 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3285 {
3286 if (TREE_UNSIGNED (etype))
3287 {
3288 etype = (*lang_hooks.types.signed_type) (etype);
3289 exp = convert (etype, exp);
3290 }
3291 return fold (build (GT_EXPR, type, exp,
3292 convert (etype, integer_zero_node)));
3293 }
3294 }
3295
3296 if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3297 && ! TREE_OVERFLOW (value))
3298 return build_range_check (type,
3299 fold (build (MINUS_EXPR, etype, exp, low)),
3300 1, convert (etype, integer_zero_node), value);
3301
3302 return 0;
3303 }
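/* Editor's illustrative sketch, not part of this file: the special
   case commented above, checked over all 8-bit values.  The cast
   relies on the usual two's complement narrowing of out-of-range
   values (implementation-defined in ISO C, modular on GCC).  */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned c;
  int ok = 1;

  for (c = 0; c <= 255; c++)
    if ((c >= 1 && c <= 127) != ((signed char) c > 0))
      ok = 0;
  puts (ok ? "ok" : "mismatch");
  return 0;
}
#endif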
3304 \f
3305 /* Given two ranges, see if we can merge them into one. Return 1 if we
3306 can, 0 if we can't. Set the output range into the specified parameters. */
3307
3308 static int
3309 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3310 tree high0, int in1_p, tree low1, tree high1)
3311 {
3312 int no_overlap;
3313 int subset;
3314 int temp;
3315 tree tem;
3316 int in_p;
3317 tree low, high;
3318 int lowequal = ((low0 == 0 && low1 == 0)
3319 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3320 low0, 0, low1, 0)));
3321 int highequal = ((high0 == 0 && high1 == 0)
3322 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3323 high0, 1, high1, 1)));
3324
3325 /* Make range 0 be the range that starts first, or ends last if they
3326 start at the same value. Swap them if it isn't. */
3327 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3328 low0, 0, low1, 0))
3329 || (lowequal
3330 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3331 high1, 1, high0, 1))))
3332 {
3333 temp = in0_p, in0_p = in1_p, in1_p = temp;
3334 tem = low0, low0 = low1, low1 = tem;
3335 tem = high0, high0 = high1, high1 = tem;
3336 }
3337
3338 /* Now flag two cases, whether the ranges are disjoint or whether the
3339 second range is totally subsumed in the first. Note that the tests
3340 below are simplified by the ones above. */
3341 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3342 high0, 1, low1, 0));
3343 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3344 high1, 1, high0, 1));
3345
3346 /* We now have four cases, depending on whether we are including or
3347 excluding the two ranges. */
3348 if (in0_p && in1_p)
3349 {
3350 /* If they don't overlap, the result is false. If the second range
3351 is a subset it is the result. Otherwise, the range is from the start
3352 of the second to the end of the first. */
3353 if (no_overlap)
3354 in_p = 0, low = high = 0;
3355 else if (subset)
3356 in_p = 1, low = low1, high = high1;
3357 else
3358 in_p = 1, low = low1, high = high0;
3359 }
3360
3361 else if (in0_p && ! in1_p)
3362 {
3363 /* If they don't overlap, the result is the first range. If they are
3364 equal, the result is false. If the second range is a subset of the
3365 first, and the ranges begin at the same place, we go from just after
3366 the end of the first range to the end of the second. If the second
3367 range is not a subset of the first, or if it is a subset and both
3368 ranges end at the same place, the range starts at the start of the
3369 first range and ends just before the second range.
3370 Otherwise, we can't describe this as a single range. */
3371 if (no_overlap)
3372 in_p = 1, low = low0, high = high0;
3373 else if (lowequal && highequal)
3374 in_p = 0, low = high = 0;
3375 else if (subset && lowequal)
3376 {
3377 in_p = 1, high = high0;
3378 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3379 integer_one_node, 0);
3380 }
3381 else if (! subset || highequal)
3382 {
3383 in_p = 1, low = low0;
3384 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3385 integer_one_node, 0);
3386 }
3387 else
3388 return 0;
3389 }
3390
3391 else if (! in0_p && in1_p)
3392 {
3393 /* If they don't overlap, the result is the second range. If the second
3394 is a subset of the first, the result is false. Otherwise,
3395 the range starts just after the first range and ends at the
3396 end of the second. */
3397 if (no_overlap)
3398 in_p = 1, low = low1, high = high1;
3399 else if (subset || highequal)
3400 in_p = 0, low = high = 0;
3401 else
3402 {
3403 in_p = 1, high = high1;
3404 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3405 integer_one_node, 0);
3406 }
3407 }
3408
3409 else
3410 {
3411 /* The case where we are excluding both ranges. Here the complex case
3412 is if they don't overlap. In that case, the only time we have a
3413 range is if they are adjacent. If the second is a subset of the
3414 first, the result is the first. Otherwise, the range to exclude
3415 starts at the beginning of the first range and ends at the end of the
3416 second. */
3417 if (no_overlap)
3418 {
3419 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3420 range_binop (PLUS_EXPR, NULL_TREE,
3421 high0, 1,
3422 integer_one_node, 1),
3423 1, low1, 0)))
3424 in_p = 0, low = low0, high = high1;
3425 else
3426 return 0;
3427 }
3428 else if (subset)
3429 in_p = 0, low = low0, high = high0;
3430 else
3431 in_p = 0, low = low0, high = high1;
3432 }
3433
3434 *pin_p = in_p, *plow = low, *phigh = high;
3435 return 1;
3436 }
3437 \f
3438 #ifndef RANGE_TEST_NON_SHORT_CIRCUIT
3439 #define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
3440 #endif
3441
3442 /* EXP is some logical combination of boolean tests. See if we can
3443 merge it into some range test. Return the new tree if so. */
3444
3445 static tree
3446 fold_range_test (tree exp)
3447 {
3448 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3449 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3450 int in0_p, in1_p, in_p;
3451 tree low0, low1, low, high0, high1, high;
3452 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3453 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3454 tree tem;
3455
3456 /* If this is an OR operation, invert both sides; we will invert
3457 again at the end. */
3458 if (or_op)
3459 in0_p = ! in0_p, in1_p = ! in1_p;
3460
3461 /* If both expressions are the same, if we can merge the ranges, and we
3462 can build the range test, return it or it inverted. If one of the
3463 ranges is always true or always false, consider it to be the same
3464 expression as the other. */
3465 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3466 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3467 in1_p, low1, high1)
3468 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3469 lhs != 0 ? lhs
3470 : rhs != 0 ? rhs : integer_zero_node,
3471 in_p, low, high))))
3472 return or_op ? invert_truthvalue (tem) : tem;
3473
3474 /* On machines where the branch cost is expensive, if this is a
3475 short-circuited branch and the underlying object on both sides
3476 is the same, make a non-short-circuit operation. */
3477 else if (RANGE_TEST_NON_SHORT_CIRCUIT
3478 && lhs != 0 && rhs != 0
3479 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3480 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3481 && operand_equal_p (lhs, rhs, 0))
3482 {
3483 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3484 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
3485 which cases we can't do this. */
3486 if (simple_operand_p (lhs))
3487 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3488 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3489 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3490 TREE_OPERAND (exp, 1));
3491
3492 else if ((*lang_hooks.decls.global_bindings_p) () == 0
3493 && ! CONTAINS_PLACEHOLDER_P (lhs))
3494 {
3495 tree common = save_expr (lhs);
3496
3497 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3498 or_op ? ! in0_p : in0_p,
3499 low0, high0))
3500 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3501 or_op ? ! in1_p : in1_p,
3502 low1, high1))))
3503 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3504 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3505 TREE_TYPE (exp), lhs, rhs);
3506 }
3507 }
3508
3509 return 0;
3510 }
3511 \f
3512 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3513 bit value. Arrange things so the extra bits will be set to zero if and
3514 only if C is sign-extended to its full width. If MASK is nonzero,
3515 it is an INTEGER_CST that should be AND'ed with the extra bits. */
3516
3517 static tree
3518 unextend (tree c, int p, int unsignedp, tree mask)
3519 {
3520 tree type = TREE_TYPE (c);
3521 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3522 tree temp;
3523
3524 if (p == modesize || unsignedp)
3525 return c;
3526
3527 /* We work by getting just the sign bit into the low-order bit, then
3528 into the high-order bit, then sign-extend. We then XOR that value
3529 with C. */
3530 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3531 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3532
3533 /* We must use a signed type in order to get an arithmetic right shift.
3534 However, we must also avoid introducing accidental overflows, so that
3535 a subsequent call to integer_zerop will work. Hence we must
3536 do the type conversion here. At this point, the constant is either
3537 zero or one, and the conversion to a signed type can never overflow.
3538 We could get an overflow if this conversion is done anywhere else. */
3539 if (TREE_UNSIGNED (type))
3540 temp = convert ((*lang_hooks.types.signed_type) (type), temp);
3541
3542 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3543 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3544 if (mask != 0)
3545 temp = const_binop (BIT_AND_EXPR, temp, convert (TREE_TYPE (c), mask), 0);
3546 /* If necessary, convert the type back to match the type of C. */
3547 if (TREE_UNSIGNED (type))
3548 temp = convert (type, temp);
3549
3550 return convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
3551 }
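
/* Illustrative sketch (standalone program, not part of this file):
   the shift trick above replayed on a plain 32-bit integer. Move the
   sign bit of the P-bit field to the top, arithmetic-shift it back to
   smear it through the extra bits, then XOR with C; the extra bits of
   the result are zero exactly when C was already sign-extended. This
   assumes a 32-bit int32_t with arithmetic right shift of negative
   values, which holds for GCC targets. */

#include <assert.h>
#include <stdint.h>

static int32_t
smear_sign (int32_t c, int p)
{
  int32_t sign = (c >> (p - 1)) & 1;                /* field's sign bit */
  int32_t temp = (int32_t) ((uint32_t) sign << 31); /* into the top bit */
  return temp >> (31 - p);                          /* fill bits P..31  */
}

int
main (void)
{
  const int p = 4;
  int32_t ext = (int32_t) 0xFFFFFFF8;  /* -8: 0x8 sign-extended        */
  int32_t raw = 0x00000008;            /* the same 4 bits, unextended  */

  assert (((ext ^ smear_sign (ext, p)) >> p) == 0);
  assert (((raw ^ smear_sign (raw, p)) >> p) != 0);
  return 0;
}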
3552 \f
3553 /* Find ways of folding logical expressions of LHS and RHS:
3554 Try to merge two comparisons to the same innermost item.
3555 Look for range tests like "ch >= '0' && ch <= '9'".
3556 Look for combinations of simple terms on machines with expensive branches
3557 and evaluate the RHS unconditionally.
3558
3559 For example, if we have p->a == 2 && p->b == 4 and we can make an
3560 object large enough to span both A and B, we can do this with a comparison
3561 against the object ANDed with a mask.
3562
3563 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3564 operations to do this with one comparison.
3565
3566 We check for both normal comparisons and the BIT_AND_EXPRs made by
3567 this function and the one above.
3568
3569 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3570 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3571
3572 TRUTH_TYPE is the type of the logical operation and LHS and RHS are its
3573 two operands.
3574
3575 We return the simplified tree or 0 if no optimization is possible. */
3576
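/* Illustrative sketch (standalone program, not part of this file):
   what the merge aims for at the source level. Two adjacent one-byte
   fields are tested with a single wider load and one comparison;
   memcpy stands in for the bit-field reference so the sketch stays
   portable. When the fields do not fill the loaded word, a BIT_AND
   with a mask precedes the comparison. */

#include <assert.h>
#include <stdint.h>
#include <string.h>

struct s { unsigned char a, b; };

static int
both_eq (const struct s *p)
{
  /* Merged form of p->a == 2 && p->b == 4: one load spans A and B.  */
  uint16_t word, want;
  const struct s k = { 2, 4 };
  memcpy (&word, p, sizeof word);
  memcpy (&want, &k, sizeof want);
  return word == want;
}

int
main (void)
{
  struct s x = { 2, 4 }, y = { 2, 5 };
  assert (both_eq (&x) && ! both_eq (&y));
  return 0;
}
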
3577 static tree
3578 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
3579 {
3580 /* If this is the "or" of two comparisons, we can do something if
3581 the comparisons are NE_EXPR. If this is the "and", we can do something
3582 if the comparisons are EQ_EXPR. I.e.,
3583 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3584
3585 WANTED_CODE is this operation code. For single bit fields, we can
3586 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3587 comparison for one-bit fields. */
3588
3589 enum tree_code wanted_code;
3590 enum tree_code lcode, rcode;
3591 tree ll_arg, lr_arg, rl_arg, rr_arg;
3592 tree ll_inner, lr_inner, rl_inner, rr_inner;
3593 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3594 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3595 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3596 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3597 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3598 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3599 enum machine_mode lnmode, rnmode;
3600 tree ll_mask, lr_mask, rl_mask, rr_mask;
3601 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3602 tree l_const, r_const;
3603 tree lntype, rntype, result;
3604 int first_bit, end_bit;
3605 int volatilep;
3606
3607 /* Start by getting the comparison codes. Fail if anything is volatile.
3608 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3609 it were surrounded with a NE_EXPR. */
3610
3611 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3612 return 0;
3613
3614 lcode = TREE_CODE (lhs);
3615 rcode = TREE_CODE (rhs);
3616
3617 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3618 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3619
3620 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3621 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3622
3623 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3624 return 0;
3625
3626 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3627 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3628
3629 ll_arg = TREE_OPERAND (lhs, 0);
3630 lr_arg = TREE_OPERAND (lhs, 1);
3631 rl_arg = TREE_OPERAND (rhs, 0);
3632 rr_arg = TREE_OPERAND (rhs, 1);
3633
3634 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
3635 if (simple_operand_p (ll_arg)
3636 && simple_operand_p (lr_arg)
3637 && !FLOAT_TYPE_P (TREE_TYPE (ll_arg)))
3638 {
3639 int compcode;
3640
3641 if (operand_equal_p (ll_arg, rl_arg, 0)
3642 && operand_equal_p (lr_arg, rr_arg, 0))
3643 {
3644 int lcompcode, rcompcode;
3645
3646 lcompcode = comparison_to_compcode (lcode);
3647 rcompcode = comparison_to_compcode (rcode);
3648 compcode = (code == TRUTH_AND_EXPR)
3649 ? lcompcode & rcompcode
3650 : lcompcode | rcompcode;
3651 }
3652 else if (operand_equal_p (ll_arg, rr_arg, 0)
3653 && operand_equal_p (lr_arg, rl_arg, 0))
3654 {
3655 int lcompcode, rcompcode;
3656
3657 rcode = swap_tree_comparison (rcode);
3658 lcompcode = comparison_to_compcode (lcode);
3659 rcompcode = comparison_to_compcode (rcode);
3660 compcode = (code == TRUTH_AND_EXPR)
3661 ? lcompcode & rcompcode
3662 : lcompcode | rcompcode;
3663 }
3664 else
3665 compcode = -1;
3666
3667 if (compcode == COMPCODE_TRUE)
3668 return convert (truth_type, integer_one_node);
3669 else if (compcode == COMPCODE_FALSE)
3670 return convert (truth_type, integer_zero_node);
3671 else if (compcode != -1)
3672 return build (compcode_to_comparison (compcode),
3673 truth_type, ll_arg, lr_arg);
3674 }
3675
3676 /* If the RHS can be evaluated unconditionally and its operands are
3677 simple, it wins to evaluate the RHS unconditionally on machines
3678 with expensive branches. In this case, this isn't a comparison
3679 that can be merged. Avoid doing this if the RHS is a floating-point
3680 comparison since those can trap. */
3681
3682 if (BRANCH_COST >= 2
3683 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
3684 && simple_operand_p (rl_arg)
3685 && simple_operand_p (rr_arg))
3686 {
3687 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
3688 if (code == TRUTH_OR_EXPR
3689 && lcode == NE_EXPR && integer_zerop (lr_arg)
3690 && rcode == NE_EXPR && integer_zerop (rr_arg)
3691 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3692 return build (NE_EXPR, truth_type,
3693 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3694 ll_arg, rl_arg),
3695 integer_zero_node);
3696
3697 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
3698 if (code == TRUTH_AND_EXPR
3699 && lcode == EQ_EXPR && integer_zerop (lr_arg)
3700 && rcode == EQ_EXPR && integer_zerop (rr_arg)
3701 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3702 return build (EQ_EXPR, truth_type,
3703 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3704 ll_arg, rl_arg),
3705 integer_zero_node);
3706
3707 return build (code, truth_type, lhs, rhs);
3708 }
3709
3710 /* See if the comparisons can be merged. Then get all the parameters for
3711 each side. */
3712
3713 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
3714 || (rcode != EQ_EXPR && rcode != NE_EXPR))
3715 return 0;
3716
3717 volatilep = 0;
3718 ll_inner = decode_field_reference (ll_arg,
3719 &ll_bitsize, &ll_bitpos, &ll_mode,
3720 &ll_unsignedp, &volatilep, &ll_mask,
3721 &ll_and_mask);
3722 lr_inner = decode_field_reference (lr_arg,
3723 &lr_bitsize, &lr_bitpos, &lr_mode,
3724 &lr_unsignedp, &volatilep, &lr_mask,
3725 &lr_and_mask);
3726 rl_inner = decode_field_reference (rl_arg,
3727 &rl_bitsize, &rl_bitpos, &rl_mode,
3728 &rl_unsignedp, &volatilep, &rl_mask,
3729 &rl_and_mask);
3730 rr_inner = decode_field_reference (rr_arg,
3731 &rr_bitsize, &rr_bitpos, &rr_mode,
3732 &rr_unsignedp, &volatilep, &rr_mask,
3733 &rr_and_mask);
3734
3735 /* It must be true that the inner operation on the lhs of each
3736 comparison is the same if we are to be able to do anything.
3737 Then see if we have constants. If not, the same must be true for
3738 the rhs's. */
3739 if (volatilep || ll_inner == 0 || rl_inner == 0
3740 || ! operand_equal_p (ll_inner, rl_inner, 0))
3741 return 0;
3742
3743 if (TREE_CODE (lr_arg) == INTEGER_CST
3744 && TREE_CODE (rr_arg) == INTEGER_CST)
3745 l_const = lr_arg, r_const = rr_arg;
3746 else if (lr_inner == 0 || rr_inner == 0
3747 || ! operand_equal_p (lr_inner, rr_inner, 0))
3748 return 0;
3749 else
3750 l_const = r_const = 0;
3751
3752 /* If either comparison code is not correct for our logical operation,
3753 fail. However, we can convert a one-bit comparison against zero into
3754 the opposite comparison against that bit being set in the field. */
3755
3756 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
3757 if (lcode != wanted_code)
3758 {
3759 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
3760 {
3761 /* Make the left operand unsigned, since we are only interested
3762 in the value of one bit. Otherwise we are doing the wrong
3763 thing below. */
3764 ll_unsignedp = 1;
3765 l_const = ll_mask;
3766 }
3767 else
3768 return 0;
3769 }
3770
3771 /* This is analogous to the code for l_const above. */
3772 if (rcode != wanted_code)
3773 {
3774 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
3775 {
3776 rl_unsignedp = 1;
3777 r_const = rl_mask;
3778 }
3779 else
3780 return 0;
3781 }
3782
3783 /* After this point all optimizations will generate bit-field
3784 references, which we might not want. */
3785 if (! (*lang_hooks.can_use_bit_fields_p) ())
3786 return 0;
3787
3788 /* See if we can find a mode that contains both fields being compared on
3789 the left. If we can't, fail. Otherwise, update all constants and masks
3790 to be relative to a field of that size. */
3791 first_bit = MIN (ll_bitpos, rl_bitpos);
3792 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
3793 lnmode = get_best_mode (end_bit - first_bit, first_bit,
3794 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
3795 volatilep);
3796 if (lnmode == VOIDmode)
3797 return 0;
3798
3799 lnbitsize = GET_MODE_BITSIZE (lnmode);
3800 lnbitpos = first_bit & ~ (lnbitsize - 1);
3801 lntype = (*lang_hooks.types.type_for_size) (lnbitsize, 1);
3802 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
3803
3804 if (BYTES_BIG_ENDIAN)
3805 {
3806 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
3807 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
3808 }
3809
3810 ll_mask = const_binop (LSHIFT_EXPR, convert (lntype, ll_mask),
3811 size_int (xll_bitpos), 0);
3812 rl_mask = const_binop (LSHIFT_EXPR, convert (lntype, rl_mask),
3813 size_int (xrl_bitpos), 0);
3814
3815 if (l_const)
3816 {
3817 l_const = convert (lntype, l_const);
3818 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
3819 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
3820 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
3821 fold (build1 (BIT_NOT_EXPR,
3822 lntype, ll_mask)),
3823 0)))
3824 {
3825 warning ("comparison is always %d", wanted_code == NE_EXPR);
3826
3827 return convert (truth_type,
3828 wanted_code == NE_EXPR
3829 ? integer_one_node : integer_zero_node);
3830 }
3831 }
3832 if (r_const)
3833 {
3834 r_const = convert (lntype, r_const);
3835 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
3836 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
3837 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
3838 fold (build1 (BIT_NOT_EXPR,
3839 lntype, rl_mask)),
3840 0)))
3841 {
3842 warning ("comparison is always %d", wanted_code == NE_EXPR);
3843
3844 return convert (truth_type,
3845 wanted_code == NE_EXPR
3846 ? integer_one_node : integer_zero_node);
3847 }
3848 }
3849
3850 /* If the right sides are not constant, do the same for them. Also,
3851 disallow this optimization if a size or signedness mismatch occurs
3852 between the left and right sides. */
3853 if (l_const == 0)
3854 {
3855 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
3856 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
3857 /* Make sure the two fields on the right
3858 correspond to the left without being swapped. */
3859 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
3860 return 0;
3861
3862 first_bit = MIN (lr_bitpos, rr_bitpos);
3863 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
3864 rnmode = get_best_mode (end_bit - first_bit, first_bit,
3865 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
3866 volatilep);
3867 if (rnmode == VOIDmode)
3868 return 0;
3869
3870 rnbitsize = GET_MODE_BITSIZE (rnmode);
3871 rnbitpos = first_bit & ~ (rnbitsize - 1);
3872 rntype = (*lang_hooks.types.type_for_size) (rnbitsize, 1);
3873 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
3874
3875 if (BYTES_BIG_ENDIAN)
3876 {
3877 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
3878 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
3879 }
3880
3881 lr_mask = const_binop (LSHIFT_EXPR, convert (rntype, lr_mask),
3882 size_int (xlr_bitpos), 0);
3883 rr_mask = const_binop (LSHIFT_EXPR, convert (rntype, rr_mask),
3884 size_int (xrr_bitpos), 0);
3885
3886 /* Make a mask that corresponds to both fields being compared.
3887 Do this for both items being compared. If the operands are the
3888 same size and the bits being compared are in the same position
3889 then we can do this by masking both and comparing the masked
3890 results. */
3891 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3892 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
3893 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
3894 {
3895 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
3896 ll_unsignedp || rl_unsignedp);
3897 if (! all_ones_mask_p (ll_mask, lnbitsize))
3898 lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
3899
3900 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
3901 lr_unsignedp || rr_unsignedp);
3902 if (! all_ones_mask_p (lr_mask, rnbitsize))
3903 rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
3904
3905 return build (wanted_code, truth_type, lhs, rhs);
3906 }
3907
3908 /* There is still another way we can do something: If both pairs of
3909 fields being compared are adjacent, we may be able to make a wider
3910 field containing them both.
3911
3912 Note that we still must mask the lhs/rhs expressions. Furthermore,
3913 the mask must be shifted to account for the shift done by
3914 make_bit_field_ref. */
3915 if ((ll_bitsize + ll_bitpos == rl_bitpos
3916 && lr_bitsize + lr_bitpos == rr_bitpos)
3917 || (ll_bitpos == rl_bitpos + rl_bitsize
3918 && lr_bitpos == rr_bitpos + rr_bitsize))
3919 {
3920 tree type;
3921
3922 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
3923 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
3924 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
3925 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
3926
3927 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
3928 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
3929 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
3930 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
3931
3932 /* Convert to the smaller type before masking out unwanted bits. */
3933 type = lntype;
3934 if (lntype != rntype)
3935 {
3936 if (lnbitsize > rnbitsize)
3937 {
3938 lhs = convert (rntype, lhs);
3939 ll_mask = convert (rntype, ll_mask);
3940 type = rntype;
3941 }
3942 else if (lnbitsize < rnbitsize)
3943 {
3944 rhs = convert (lntype, rhs);
3945 lr_mask = convert (lntype, lr_mask);
3946 type = lntype;
3947 }
3948 }
3949
3950 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
3951 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
3952
3953 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
3954 rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
3955
3956 return build (wanted_code, truth_type, lhs, rhs);
3957 }
3958
3959 return 0;
3960 }
3961
3962 /* Handle the case of comparisons with constants. If there is something in
3963 common between the masks, those bits of the constants must be the same.
3964 If not, the condition is always false. Test for this to avoid generating
3965 incorrect code below. */
3966 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
3967 if (! integer_zerop (result)
3968 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
3969 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
3970 {
3971 if (wanted_code == NE_EXPR)
3972 {
3973 warning ("`or' of unmatched not-equal tests is always 1");
3974 return convert (truth_type, integer_one_node);
3975 }
3976 else
3977 {
3978 warning ("`and' of mutually exclusive equal-tests is always 0");
3979 return convert (truth_type, integer_zero_node);
3980 }
3981 }
3982
3983 /* Construct the expression we will return. First get the component
3984 reference we will make. Unless the mask is all ones the width of
3985 that field, perform the mask operation. Then compare with the
3986 merged constant. */
3987 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
3988 ll_unsignedp || rl_unsignedp);
3989
3990 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3991 if (! all_ones_mask_p (ll_mask, lnbitsize))
3992 result = build (BIT_AND_EXPR, lntype, result, ll_mask);
3993
3994 return build (wanted_code, truth_type, result,
3995 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
3996 }
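
/* Illustrative sketch (standalone program, not part of this file):
   the compcode merging performed early in fold_truthop, assuming the
   usual encoding with one bit per possible ordering (COMPCODE_LT = 1,
   COMPCODE_EQ = 2, COMPCODE_GT = 4). ANDing two codes merges an "and"
   of comparisons, ORing merges an "or". */

#include <assert.h>

enum { CLT = 1, CEQ = 2, CGT = 4 };

static int
holds (int compcode, int x, int y)
{
  return ((x < y) && (compcode & CLT))
         || ((x == y) && (compcode & CEQ))
         || ((x > y) && (compcode & CGT));
}

int
main (void)
{
  int x, y;

  /* (x < y) && (x == y): CLT & CEQ == 0, i.e. always false.  */
  assert ((CLT & CEQ) == 0);

  /* (x < y) || (x == y) merges to CLT | CEQ, i.e. x <= y.  */
  for (x = -2; x <= 2; x++)
    for (y = -2; y <= 2; y++)
      assert (holds (CLT | CEQ, x, y) == (x <= y));
  return 0;
}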
3997 \f
3998 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
3999 constant. */
4000
4001 static tree
4002 optimize_minmax_comparison (tree t)
4003 {
4004 tree type = TREE_TYPE (t);
4005 tree arg0 = TREE_OPERAND (t, 0);
4006 enum tree_code op_code;
4007 tree comp_const = TREE_OPERAND (t, 1);
4008 tree minmax_const;
4009 int consts_equal, consts_lt;
4010 tree inner;
4011
4012 STRIP_SIGN_NOPS (arg0);
4013
4014 op_code = TREE_CODE (arg0);
4015 minmax_const = TREE_OPERAND (arg0, 1);
4016 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4017 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4018 inner = TREE_OPERAND (arg0, 0);
4019
4020 /* If something does not permit us to optimize, return the original tree. */
4021 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4022 || TREE_CODE (comp_const) != INTEGER_CST
4023 || TREE_CONSTANT_OVERFLOW (comp_const)
4024 || TREE_CODE (minmax_const) != INTEGER_CST
4025 || TREE_CONSTANT_OVERFLOW (minmax_const))
4026 return t;
4027
4028 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4029 and GT_EXPR, doing the rest with recursive calls using logical
4030 simplifications. */
4031 switch (TREE_CODE (t))
4032 {
4033 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4034 return
4035 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4036
4037 case GE_EXPR:
4038 return
4039 fold (build (TRUTH_ORIF_EXPR, type,
4040 optimize_minmax_comparison
4041 (build (EQ_EXPR, type, arg0, comp_const)),
4042 optimize_minmax_comparison
4043 (build (GT_EXPR, type, arg0, comp_const))));
4044
4045 case EQ_EXPR:
4046 if (op_code == MAX_EXPR && consts_equal)
4047 /* MAX (X, 0) == 0 -> X <= 0 */
4048 return fold (build (LE_EXPR, type, inner, comp_const));
4049
4050 else if (op_code == MAX_EXPR && consts_lt)
4051 /* MAX (X, 0) == 5 -> X == 5 */
4052 return fold (build (EQ_EXPR, type, inner, comp_const));
4053
4054 else if (op_code == MAX_EXPR)
4055 /* MAX (X, 0) == -1 -> false */
4056 return omit_one_operand (type, integer_zero_node, inner);
4057
4058 else if (consts_equal)
4059 /* MIN (X, 0) == 0 -> X >= 0 */
4060 return fold (build (GE_EXPR, type, inner, comp_const));
4061
4062 else if (consts_lt)
4063 /* MIN (X, 0) == 5 -> false */
4064 return omit_one_operand (type, integer_zero_node, inner);
4065
4066 else
4067 /* MIN (X, 0) == -1 -> X == -1 */
4068 return fold (build (EQ_EXPR, type, inner, comp_const));
4069
4070 case GT_EXPR:
4071 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4072 /* MAX (X, 0) > 0 -> X > 0
4073 MAX (X, 0) > 5 -> X > 5 */
4074 return fold (build (GT_EXPR, type, inner, comp_const));
4075
4076 else if (op_code == MAX_EXPR)
4077 /* MAX (X, 0) > -1 -> true */
4078 return omit_one_operand (type, integer_one_node, inner);
4079
4080 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4081 /* MIN (X, 0) > 0 -> false
4082 MIN (X, 0) > 5 -> false */
4083 return omit_one_operand (type, integer_zero_node, inner);
4084
4085 else
4086 /* MIN (X, 0) > -1 -> X > -1 */
4087 return fold (build (GT_EXPR, type, inner, comp_const));
4088
4089 default:
4090 return t;
4091 }
4092 }
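
/* Illustrative sketch (standalone program, not part of this file):
   brute-force check of the EQ_EXPR and GT_EXPR identities above, with
   0 as the min/max constant and 5 or -1 as the comparison constant. */

#include <assert.h>

static int max_i (int a, int b) { return a > b ? a : b; }
static int min_i (int a, int b) { return a < b ? a : b; }

int
main (void)
{
  int x;
  for (x = -10; x <= 10; x++)
    {
      assert ((max_i (x, 0) == 0) == (x <= 0));  /* MAX (X, 0) == 0  -> X <= 0 */
      assert ((max_i (x, 0) == 5) == (x == 5));  /* MAX (X, 0) == 5  -> X == 5 */
      assert ((max_i (x, 0) == -1) == 0);        /* MAX (X, 0) == -1 -> false  */
      assert ((min_i (x, 0) == 0) == (x >= 0));  /* MIN (X, 0) == 0  -> X >= 0 */
      assert ((max_i (x, 0) > 5) == (x > 5));    /* MAX (X, 0) > 5   -> X > 5  */
    }
  return 0;
}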
4093 \f
4094 /* T is an integer expression that is being multiplied, divided, or taken a
4095 modulus (CODE says which and what kind of divide or modulus) by a
4096 constant C. See if we can eliminate that operation by folding it with
4097 other operations already in T. WIDE_TYPE, if non-null, is a type that
4098 should be used for the computation if wider than our type.
4099
4100 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4101 (X * 2) + (Y * 4). We must, however, be assured that either the original
4102 expression would not overflow or that overflow is undefined for the type
4103 in the language in question.
4104
4105 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4106 the machine has a multiply-accumulate insn or that this is part of an
4107 addressing calculation.
4108
4109 If we return a non-null expression, it is an equivalent form of the
4110 original computation, but need not be in the original type. */
4111
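/* Illustrative sketch (standalone program, not part of this file):
   the two headline examples from the comment above, checked by brute
   force over a range where none of the intermediate products overflow,
   which is the precondition the comment states. */

#include <assert.h>

int
main (void)
{
  int x, y;
  for (x = -100; x <= 100; x++)
    for (y = -100; y <= 100; y++)
      {
        /* ((X * 8) + (Y * 16)) / 4  ->  (X * 2) + (Y * 4).  */
        assert ((x * 8 + y * 16) / 4 == x * 2 + y * 4);
        /* (X + 7) * 4  ->  X * 4 + 28.  */
        assert ((x + 7) * 4 == x * 4 + 28);
      }
  return 0;
}
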
4112 static tree
4113 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
4114 {
4115 /* To avoid exponential search depth, refuse to allow recursion past
4116 three levels. Beyond that (1) it's highly unlikely that we'll find
4117 something interesting and (2) we've probably processed it before
4118 when we built the inner expression. */
4119
4120 static int depth;
4121 tree ret;
4122
4123 if (depth > 3)
4124 return NULL;
4125
4126 depth++;
4127 ret = extract_muldiv_1 (t, c, code, wide_type);
4128 depth--;
4129
4130 return ret;
4131 }
4132
4133 static tree
4134 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
4135 {
4136 tree type = TREE_TYPE (t);
4137 enum tree_code tcode = TREE_CODE (t);
4138 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
4139 > GET_MODE_SIZE (TYPE_MODE (type)))
4140 ? wide_type : type);
4141 tree t1, t2;
4142 int same_p = tcode == code;
4143 tree op0 = NULL_TREE, op1 = NULL_TREE;
4144
4145 /* Don't deal with constants of zero here; they confuse the code below. */
4146 if (integer_zerop (c))
4147 return NULL_TREE;
4148
4149 if (TREE_CODE_CLASS (tcode) == '1')
4150 op0 = TREE_OPERAND (t, 0);
4151
4152 if (TREE_CODE_CLASS (tcode) == '2')
4153 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
4154
4155 /* Note that we need not handle conditional operations here since fold
4156 already handles those cases. So just do arithmetic here. */
4157 switch (tcode)
4158 {
4159 case INTEGER_CST:
4160 /* For a constant, we can always simplify if we are a multiply
4161 or (for divide and modulus) if it is a multiple of our constant. */
4162 if (code == MULT_EXPR
4163 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
4164 return const_binop (code, convert (ctype, t), convert (ctype, c), 0);
4165 break;
4166
4167 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
4168 /* If op0 is an expression ... */
4169 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
4170 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
4171 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
4172 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
4173 /* ... and is unsigned, and its type is smaller than ctype,
4174 then we cannot pass through as widening. */
4175 && ((TREE_UNSIGNED (TREE_TYPE (op0))
4176 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4177 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
4178 && (GET_MODE_SIZE (TYPE_MODE (ctype))
4179 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
4180 /* ... or its type is larger than ctype,
4181 then we cannot pass through this truncation. */
4182 || (GET_MODE_SIZE (TYPE_MODE (ctype))
4183 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
4184 /* ... or signedness changes for division or modulus,
4185 then we cannot pass through this conversion. */
4186 || (code != MULT_EXPR
4187 && (TREE_UNSIGNED (ctype)
4188 != TREE_UNSIGNED (TREE_TYPE (op0))))))
4189 break;
4190
4191 /* Pass the constant down and see if we can make a simplification. If
4192 we can, replace this expression with the inner simplification for
4193 possible later conversion to our or some other type. */
4194 if ((t2 = convert (TREE_TYPE (op0), c)) != 0
4195 && TREE_CODE (t2) == INTEGER_CST
4196 && ! TREE_CONSTANT_OVERFLOW (t2)
4197 && (0 != (t1 = extract_muldiv (op0, t2, code,
4198 code == MULT_EXPR
4199 ? ctype : NULL_TREE))))
4200 return t1;
4201 break;
4202
4203 case NEGATE_EXPR: case ABS_EXPR:
4204 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4205 return fold (build1 (tcode, ctype, convert (ctype, t1)));
4206 break;
4207
4208 case MIN_EXPR: case MAX_EXPR:
4209 /* If widening the type changes the signedness, then we can't perform
4210 this optimization as that changes the result. */
4211 if (TREE_UNSIGNED (ctype) != TREE_UNSIGNED (type))
4212 break;
4213
4214 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
4215 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
4216 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
4217 {
4218 if (tree_int_cst_sgn (c) < 0)
4219 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
4220
4221 return fold (build (tcode, ctype, convert (ctype, t1),
4222 convert (ctype, t2)));
4223 }
4224 break;
4225
4226 case WITH_RECORD_EXPR:
4227 if ((t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code, wide_type)) != 0)
4228 return build (WITH_RECORD_EXPR, TREE_TYPE (t1), t1,
4229 TREE_OPERAND (t, 1));
4230 break;
4231
4232 case LSHIFT_EXPR: case RSHIFT_EXPR:
4233 /* If the second operand is constant, this is a multiplication
4234 or floor division by a power of two, so we can treat it that
4235 way unless the multiplier or divisor overflows. */
4236 if (TREE_CODE (op1) == INTEGER_CST
4237 /* const_binop may not detect overflow correctly,
4238 so check for it explicitly here. */
4239 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
4240 && TREE_INT_CST_HIGH (op1) == 0
4241 && 0 != (t1 = convert (ctype,
4242 const_binop (LSHIFT_EXPR, size_one_node,
4243 op1, 0)))
4244 && ! TREE_OVERFLOW (t1))
4245 return extract_muldiv (build (tcode == LSHIFT_EXPR
4246 ? MULT_EXPR : FLOOR_DIV_EXPR,
4247 ctype, convert (ctype, op0), t1),
4248 c, code, wide_type);
4249 break;
4250
4251 case PLUS_EXPR: case MINUS_EXPR:
4252 /* See if we can eliminate the operation on both sides. If we can, we
4253 can return a new PLUS or MINUS. If we can't, the only remaining
4254 cases where we can do anything are if the second operand is a
4255 constant. */
4256 t1 = extract_muldiv (op0, c, code, wide_type);
4257 t2 = extract_muldiv (op1, c, code, wide_type);
4258 if (t1 != 0 && t2 != 0
4259 && (code == MULT_EXPR
4260 /* If not multiplication, we can only do this if both operands
4261 are divisible by c. */
4262 || (multiple_of_p (ctype, op0, c)
4263 && multiple_of_p (ctype, op1, c))))
4264 return fold (build (tcode, ctype, convert (ctype, t1),
4265 convert (ctype, t2)));
4266
4267 /* If this was a subtraction, negate OP1 and set it to be an addition.
4268 This simplifies the logic below. */
4269 if (tcode == MINUS_EXPR)
4270 tcode = PLUS_EXPR, op1 = negate_expr (op1);
4271
4272 if (TREE_CODE (op1) != INTEGER_CST)
4273 break;
4274
4275 /* If either OP1 or C are negative, this optimization is not safe for
4276 some of the division and remainder types while for others we need
4277 to change the code. */
4278 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
4279 {
4280 if (code == CEIL_DIV_EXPR)
4281 code = FLOOR_DIV_EXPR;
4282 else if (code == FLOOR_DIV_EXPR)
4283 code = CEIL_DIV_EXPR;
4284 else if (code != MULT_EXPR
4285 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
4286 break;
4287 }
4288
4289 /* If it's a multiply or a division/modulus operation of a multiple
4290 of our constant, do the operation and verify it doesn't overflow. */
4291 if (code == MULT_EXPR
4292 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4293 {
4294 op1 = const_binop (code, convert (ctype, op1), convert (ctype, c), 0);
4295 if (op1 == 0 || TREE_OVERFLOW (op1))
4296 break;
4297 }
4298 else
4299 break;
4300
4301 /* If we have an unsigned type that is not a sizetype, we cannot widen
4302 the operation since it will change the result if the original
4303 computation overflowed. */
4304 if (TREE_UNSIGNED (ctype)
4305 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
4306 && ctype != type)
4307 break;
4308
4309 /* If we were able to eliminate our operation from the first side,
4310 apply our operation to the second side and reform the PLUS. */
4311 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4312 return fold (build (tcode, ctype, convert (ctype, t1), op1));
4313
4314 /* The last case is if we are a multiply. In that case, we can
4315 apply the distributive law to commute the multiply and addition
4316 if the multiplication of the constants doesn't overflow. */
4317 if (code == MULT_EXPR)
4318 return fold (build (tcode, ctype, fold (build (code, ctype,
4319 convert (ctype, op0),
4320 convert (ctype, c))),
4321 op1));
4322
4323 break;
4324
4325 case MULT_EXPR:
4326 /* We have a special case here if we are doing something like
4327 (C * 8) % 4 since we know that's zero. */
4328 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4329 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4330 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4331 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4332 return omit_one_operand (type, integer_zero_node, op0);
4333
4334 /* ... fall through ... */
4335
4336 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
4337 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
4338 /* If we can extract our operation from the LHS, do so and return a
4339 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
4340 do something only if the second operand is a constant. */
4341 if (same_p
4342 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4343 return fold (build (tcode, ctype, convert (ctype, t1),
4344 convert (ctype, op1)));
4345 else if (tcode == MULT_EXPR && code == MULT_EXPR
4346 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4347 return fold (build (tcode, ctype, convert (ctype, op0),
4348 convert (ctype, t1)));
4349 else if (TREE_CODE (op1) != INTEGER_CST)
4350 return 0;
4351
4352 /* If these are the same operation types, we can associate them
4353 assuming no overflow. */
4354 if (tcode == code
4355 && 0 != (t1 = const_binop (MULT_EXPR, convert (ctype, op1),
4356 convert (ctype, c), 0))
4357 && ! TREE_OVERFLOW (t1))
4358 return fold (build (tcode, ctype, convert (ctype, op0), t1));
4359
4360 /* If these operations "cancel" each other, we have the main
4361 optimizations of this pass, which occur when either constant is a
4362 multiple of the other, in which case we replace this with either an
4363 operation of CODE or TCODE.
4364
4365 If we have an unsigned type that is not a sizetype, we cannot do
4366 this since it will change the result if the original computation
4367 overflowed. */
4368 if ((! TREE_UNSIGNED (ctype)
4369 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
4370 && ! flag_wrapv
4371 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4372 || (tcode == MULT_EXPR
4373 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4374 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
4375 {
4376 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4377 return fold (build (tcode, ctype, convert (ctype, op0),
4378 convert (ctype,
4379 const_binop (TRUNC_DIV_EXPR,
4380 op1, c, 0))));
4381 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4382 return fold (build (code, ctype, convert (ctype, op0),
4383 convert (ctype,
4384 const_binop (TRUNC_DIV_EXPR,
4385 c, op1, 0))));
4386 }
4387 break;
4388
4389 default:
4390 break;
4391 }
4392
4393 return 0;
4394 }
4395 \f
4396 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4397 S, a SAVE_EXPR, return the expression actually being evaluated. Note
4398 that we may sometimes modify the tree. */
4399
4400 static tree
4401 strip_compound_expr (tree t, tree s)
4402 {
4403 enum tree_code code = TREE_CODE (t);
4404
4405 /* See if this is the COMPOUND_EXPR we want to eliminate. */
4406 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4407 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4408 return TREE_OPERAND (t, 1);
4409
4410 /* See if this is a COND_EXPR or a simple arithmetic operator. We
4411 don't bother handling any other types. */
4412 else if (code == COND_EXPR)
4413 {
4414 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4415 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4416 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
4417 }
4418 else if (TREE_CODE_CLASS (code) == '1')
4419 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4420 else if (TREE_CODE_CLASS (code) == '<'
4421 || TREE_CODE_CLASS (code) == '2')
4422 {
4423 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4424 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4425 }
4426
4427 return t;
4428 }
4429 \f
4430 /* Return a node which has the indicated constant VALUE (either 0 or
4431 1), and is of the indicated TYPE. */
4432
4433 static tree
4434 constant_boolean_node (int value, tree type)
4435 {
4436 if (type == integer_type_node)
4437 return value ? integer_one_node : integer_zero_node;
4438 else if (TREE_CODE (type) == BOOLEAN_TYPE)
4439 return (*lang_hooks.truthvalue_conversion) (value ? integer_one_node :
4440 integer_zero_node);
4441 else
4442 {
4443 tree t = build_int_2 (value, 0);
4444
4445 TREE_TYPE (t) = type;
4446 return t;
4447 }
4448 }
4449
4450 /* Utility function for the following routine, to see how complex a nesting of
4451 COND_EXPRs can be. EXPR is the expression and LIMIT is a count beyond which
4452 we don't care (to avoid spending too much time on complex expressions). */
4453
4454 static int
4455 count_cond (tree expr, int lim)
4456 {
4457 int ctrue, cfalse;
4458
4459 if (TREE_CODE (expr) != COND_EXPR)
4460 return 0;
4461 else if (lim <= 0)
4462 return 0;
4463
4464 ctrue = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4465 cfalse = count_cond (TREE_OPERAND (expr, 2), lim - 1 - ctrue);
4466 return MIN (lim, 1 + ctrue + cfalse);
4467 }
4468
4469 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
4470 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
4471 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
4472 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
4473 COND is the first argument to CODE; otherwise (as in the example
4474 given here), it is the second argument. TYPE is the type of the
4475 original expression. */
4476
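/* Illustrative sketch (standalone program, not part of this file):
   the two transformations from the comment above, checked on small
   operands. The second relies on a C comparison evaluating to 0 or 1. */

#include <assert.h>

int
main (void)
{
  int a, b;
  for (a = -3; a <= 3; a++)
    for (b = 0; b <= 1; b++)
      {
        int x = 1, y = 2 * b;

        /* a + (b ? 10 : 20)  ->  b ? (a + 10) : (a + 20).  */
        assert (a + (b ? 10 : 20) == (b ? a + 10 : a + 20));
        /* a + (x < y)  ->  (x < y) ? (a + 1) : (a + 0).  */
        assert (a + (x < y) == ((x < y) ? a + 1 : a + 0));
      }
  return 0;
}
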
4477 static tree
4478 fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
4479 tree cond, tree arg, int cond_first_p)
4480 {
4481 tree test, true_value, false_value;
4482 tree lhs = NULL_TREE;
4483 tree rhs = NULL_TREE;
4484 /* In the end, we'll produce a COND_EXPR. Both arms of the
4485 conditional expression will be binary operations. The left-hand
4486 side of the expression to be executed if the condition is true
4487 will be pointed to by TRUE_LHS. Similarly, the right-hand side
4488 of the expression to be executed if the condition is true will be
4489 pointed to by TRUE_RHS. FALSE_LHS and FALSE_RHS are analogous --
4490 but apply to the expression to be executed if the conditional is
4491 false. */
4492 tree *true_lhs;
4493 tree *true_rhs;
4494 tree *false_lhs;
4495 tree *false_rhs;
4496 /* These are the codes to use for the left-hand side and right-hand
4497 side of the COND_EXPR. Normally, they are the same as CODE. */
4498 enum tree_code lhs_code = code;
4499 enum tree_code rhs_code = code;
4500 /* And these are the types of the expressions. */
4501 tree lhs_type = type;
4502 tree rhs_type = type;
4503 int save = 0;
4504
4505 if (cond_first_p)
4506 {
4507 true_rhs = false_rhs = &arg;
4508 true_lhs = &true_value;
4509 false_lhs = &false_value;
4510 }
4511 else
4512 {
4513 true_lhs = false_lhs = &arg;
4514 true_rhs = &true_value;
4515 false_rhs = &false_value;
4516 }
4517
4518 if (TREE_CODE (cond) == COND_EXPR)
4519 {
4520 test = TREE_OPERAND (cond, 0);
4521 true_value = TREE_OPERAND (cond, 1);
4522 false_value = TREE_OPERAND (cond, 2);
4523 /* If this operand throws an exception, then it does not make
4524 sense to try to perform a logical or arithmetic operation
4525 involving it. Instead of building `a + throw 3' for example,
4526 we simply build `a, throw 3'. */
4527 if (VOID_TYPE_P (TREE_TYPE (true_value)))
4528 {
4529 if (! cond_first_p)
4530 {
4531 lhs_code = COMPOUND_EXPR;
4532 lhs_type = void_type_node;
4533 }
4534 else
4535 lhs = true_value;
4536 }
4537 if (VOID_TYPE_P (TREE_TYPE (false_value)))
4538 {
4539 if (! cond_first_p)
4540 {
4541 rhs_code = COMPOUND_EXPR;
4542 rhs_type = void_type_node;
4543 }
4544 else
4545 rhs = false_value;
4546 }
4547 }
4548 else
4549 {
4550 tree testtype = TREE_TYPE (cond);
4551 test = cond;
4552 true_value = convert (testtype, integer_one_node);
4553 false_value = convert (testtype, integer_zero_node);
4554 }
4555
4556 /* If ARG is complex we want to make sure we only evaluate it once. Though
4557 this is only required if it is volatile, it might be more efficient even
4558 if it is not. However, if we succeed in folding one part to a constant,
4559 we do not need to make this SAVE_EXPR. Since we do this optimization
4560 primarily to see if we do end up with constant and this SAVE_EXPR
4561 interferes with later optimizations, suppressing it when we can is
4562 important.
4563
4564 If we are not in a function, we can't make a SAVE_EXPR, so don't try to
4565 do so. Don't try to see if the result is a constant if an arm is a
4566 COND_EXPR since we get exponential behavior in that case. */
4567
4568 if (saved_expr_p (arg))
4569 save = 1;
4570 else if (lhs == 0 && rhs == 0
4571 && !TREE_CONSTANT (arg)
4572 && (*lang_hooks.decls.global_bindings_p) () == 0
4573 && ((TREE_CODE (arg) != VAR_DECL && TREE_CODE (arg) != PARM_DECL)
4574 || TREE_SIDE_EFFECTS (arg)))
4575 {
4576 if (TREE_CODE (true_value) != COND_EXPR)
4577 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4578
4579 if (TREE_CODE (false_value) != COND_EXPR)
4580 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4581
4582 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4583 && (rhs == 0 || !TREE_CONSTANT (rhs)))
4584 {
4585 arg = save_expr (arg);
4586 lhs = rhs = 0;
4587 save = 1;
4588 }
4589 }
4590
4591 if (lhs == 0)
4592 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4593 if (rhs == 0)
4594 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4595
4596 test = fold (build (COND_EXPR, type, test, lhs, rhs));
4597
4598 if (save)
4599 return build (COMPOUND_EXPR, type,
4600 convert (void_type_node, arg),
4601 strip_compound_expr (test, arg));
4602 else
4603 return convert (type, test);
4604 }
4605
4606 \f
4607 /* Subroutine of fold() that checks for the addition of +/- 0.0.
4608
4609 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
4610 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
4611 ADDEND is the same as X.
4612
4613 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
4614 and finite. The problematic cases are when X is zero, and its mode
4615 has signed zeros. In the case of rounding towards -infinity,
4616 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
4617 modes, X + 0 is not the same as X because -0 + 0 is 0. */
4618
4619 static bool
4620 fold_real_zero_addition_p (tree type, tree addend, int negate)
4621 {
4622 if (!real_zerop (addend))
4623 return false;
4624
4625 /* Don't allow the fold with -fsignaling-nans. */
4626 if (HONOR_SNANS (TYPE_MODE (type)))
4627 return false;
4628
4629 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
4630 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
4631 return true;
4632
4633 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
4634 if (TREE_CODE (addend) == REAL_CST
4635 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
4636 negate = !negate;
4637
4638 /* The mode has signed zeros, and we have to honor their sign.
4639 In this situation, there is only one case we can return true for.
4640 X - 0 is the same as X unless rounding towards -infinity is
4641 supported. */
4642 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
4643 }
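
/* Illustrative sketch (standalone program, not part of this file):
   why X + 0.0 is not foldable to X when signed zeros are honored.
   Under the default rounding mode -0.0 + 0.0 is +0.0, so the fold
   would lose the sign of zero, while X - 0.0 preserves it. Compile
   without -ffast-math so signed zeros are honored. */

#include <assert.h>
#include <math.h>

int
main (void)
{
  double x = -0.0;

  assert (signbit (x));            /* x is the negative zero ...       */
  assert (! signbit (x + 0.0));    /* ... but x + 0.0 is positive zero */
  assert (signbit (x - 0.0));      /* x - 0.0 keeps the sign           */
  return 0;
}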
4644
4645 /* Subroutine of fold() that checks comparisons of built-in math
4646 functions against real constants.
4647
4648 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
4649 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
4650 is the type of the result and ARG0 and ARG1 are the operands of the
4651 comparison. ARG1 must be a TREE_REAL_CST.
4652
4653 The function returns the constant folded tree if a simplification
4654 can be made, and NULL_TREE otherwise. */
4655
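/* Illustrative sketch (standalone program, not part of this file):
   the core identity used below: for nonnegative x and positive c,
   sqrt(x) > c behaves like x > c*c. The test values here are chosen
   to stay away from rounding boundaries; the caller is expected to
   enable such folds only under unsafe-math settings, since rounding
   can blur the boundary in general. */

#include <assert.h>
#include <math.h>

int
main (void)
{
  const double c = 2.5;
  double x;
  for (x = 0.0; x <= 100.0; x += 0.25)
    assert ((sqrt (x) > c) == (x > c * c));
  return 0;
}
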
4656 static tree
4657 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
4658 tree type, tree arg0, tree arg1)
4659 {
4660 REAL_VALUE_TYPE c;
4661
4662 if (fcode == BUILT_IN_SQRT
4663 || fcode == BUILT_IN_SQRTF
4664 || fcode == BUILT_IN_SQRTL)
4665 {
4666 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
4667 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
4668
4669 c = TREE_REAL_CST (arg1);
4670 if (REAL_VALUE_NEGATIVE (c))
4671 {
4672 /* sqrt(x) < y is always false, if y is negative. */
4673 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
4674 return omit_one_operand (type,
4675 convert (type, integer_zero_node),
4676 arg);
4677
4678 /* sqrt(x) > y is always true, if y is negative and we
4679 don't care about NaNs, i.e. negative values of x. */
4680 if (code == NE_EXPR || !HONOR_NANS (mode))
4681 return omit_one_operand (type,
4682 convert (type, integer_one_node),
4683 arg);
4684
4685 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
4686 return fold (build (GE_EXPR, type, arg,
4687 build_real (TREE_TYPE (arg), dconst0)));
4688 }
4689 else if (code == GT_EXPR || code == GE_EXPR)
4690 {
4691 REAL_VALUE_TYPE c2;
4692
4693 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
4694 real_convert (&c2, mode, &c2);
4695
4696 if (REAL_VALUE_ISINF (c2))
4697 {
4698 /* sqrt(x) > y is x == +Inf, when y is very large. */
4699 if (HONOR_INFINITIES (mode))
4700 return fold (build (EQ_EXPR, type, arg,
4701 build_real (TREE_TYPE (arg), c2)));
4702
4703 /* sqrt(x) > y is always false, when y is very large
4704 and we don't care about infinities. */
4705 return omit_one_operand (type,
4706 convert (type, integer_zero_node),
4707 arg);
4708 }
4709
4710 /* sqrt(x) > c is the same as x > c*c. */
4711 return fold (build (code, type, arg,
4712 build_real (TREE_TYPE (arg), c2)));
4713 }
4714 else if (code == LT_EXPR || code == LE_EXPR)
4715 {
4716 REAL_VALUE_TYPE c2;
4717
4718 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
4719 real_convert (&c2, mode, &c2);
4720
4721 if (REAL_VALUE_ISINF (c2))
4722 {
4723 /* sqrt(x) < y is always true, when y is a very large
4724 value and we don't care about NaNs or Infinities. */
4725 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
4726 return omit_one_operand (type,
4727 convert (type, integer_one_node),
4728 arg);
4729
4730 /* sqrt(x) < y is x != +Inf when y is very large and we
4731 don't care about NaNs. */
4732 if (! HONOR_NANS (mode))
4733 return fold (build (NE_EXPR, type, arg,
4734 build_real (TREE_TYPE (arg), c2)));
4735
4736 /* sqrt(x) < y is x >= 0 when y is very large and we
4737 don't care about Infinities. */
4738 if (! HONOR_INFINITIES (mode))
4739 return fold (build (GE_EXPR, type, arg,
4740 build_real (TREE_TYPE (arg), dconst0)));
4741
4742 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
4743 if ((*lang_hooks.decls.global_bindings_p) () != 0
4744 || CONTAINS_PLACEHOLDER_P (arg))
4745 return NULL_TREE;
4746
4747 arg = save_expr (arg);
4748 return fold (build (TRUTH_ANDIF_EXPR, type,
4749 fold (build (GE_EXPR, type, arg,
4750 build_real (TREE_TYPE (arg),
4751 dconst0))),
4752 fold (build (NE_EXPR, type, arg,
4753 build_real (TREE_TYPE (arg),
4754 c2)))));
4755 }
4756
4757 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
4758 if (! HONOR_NANS (mode))
4759 return fold (build (code, type, arg,
4760 build_real (TREE_TYPE (arg), c2)));
4761
4762 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
4763 if ((*lang_hooks.decls.global_bindings_p) () == 0
4764 && ! CONTAINS_PLACEHOLDER_P (arg))
4765 {
4766 arg = save_expr (arg);
4767 return fold (build (TRUTH_ANDIF_EXPR, type,
4768 fold (build (GE_EXPR, type, arg,
4769 build_real (TREE_TYPE (arg),
4770 dconst0))),
4771 fold (build (code, type, arg,
4772 build_real (TREE_TYPE (arg),
4773 c2)))));
4774 }
4775 }
4776 }
4777
4778 return NULL_TREE;
4779 }
4780
4781 /* Subroutine of fold() that optimizes comparisons against Infinities,
4782 either +Inf or -Inf.
4783
4784 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
4785 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
4786 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
4787
4788 The function returns the constant folded tree if a simplification
4789 can be made, and NULL_TREE otherwise. */
4790
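/* Illustrative sketch (standalone program, not part of this file):
   the DBL_MAX rewrites used below, checked on a handful of values.
   NaN is excluded; the NE_EXPR case needs the extra TRUTH_NOT when
   NaNs are honored. */

#include <assert.h>
#include <float.h>
#include <math.h>

int
main (void)
{
  double vals[] = { -INFINITY, -1.0, 0.0, 1.0, DBL_MAX, INFINITY };
  unsigned i;

  for (i = 0; i < sizeof vals / sizeof vals[0]; i++)
    {
      double x = vals[i];
      assert ((x < INFINITY) == (x <= DBL_MAX));   /* LT_EXPR case */
      assert ((x >= INFINITY) == (x > DBL_MAX));   /* GE_EXPR case */
    }
  return 0;
}
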
4791 static tree
4792 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
4793 {
4794 enum machine_mode mode;
4795 REAL_VALUE_TYPE max;
4796 tree temp;
4797 bool neg;
4798
4799 mode = TYPE_MODE (TREE_TYPE (arg0));
4800
4801 /* For negative infinity swap the sense of the comparison. */
4802 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
4803 if (neg)
4804 code = swap_tree_comparison (code);
4805
4806 switch (code)
4807 {
4808 case GT_EXPR:
4809 /* x > +Inf is always false, if we ignore sNaNs. */
4810 if (HONOR_SNANS (mode))
4811 return NULL_TREE;
4812 return omit_one_operand (type,
4813 convert (type, integer_zero_node),
4814 arg0);
4815
4816 case LE_EXPR:
4817 /* x <= +Inf is always true, if we don't care about NaNs. */
4818 if (! HONOR_NANS (mode))
4819 return omit_one_operand (type,
4820 convert (type, integer_one_node),
4821 arg0);
4822
4823 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
4824 if ((*lang_hooks.decls.global_bindings_p) () == 0
4825 && ! CONTAINS_PLACEHOLDER_P (arg0))
4826 {
4827 arg0 = save_expr (arg0);
4828 return fold (build (EQ_EXPR, type, arg0, arg0));
4829 }
4830 break;
4831
4832 case EQ_EXPR:
4833 case GE_EXPR:
4834 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
4835 real_maxval (&max, neg, mode);
4836 return fold (build (neg ? LT_EXPR : GT_EXPR, type,
4837 arg0, build_real (TREE_TYPE (arg0), max)));
4838
4839 case LT_EXPR:
4840 /* x < +Inf is always equal to x <= DBL_MAX. */
4841 real_maxval (&max, neg, mode);
4842 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
4843 arg0, build_real (TREE_TYPE (arg0), max)));
4844
4845 case NE_EXPR:
4846 /* x != +Inf is always equal to !(x > DBL_MAX). */
4847 real_maxval (&max, neg, mode);
4848 if (! HONOR_NANS (mode))
4849 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
4850 arg0, build_real (TREE_TYPE (arg0), max)));
4851 temp = fold (build (neg ? LT_EXPR : GT_EXPR, type,
4852 arg0, build_real (TREE_TYPE (arg0), max)));
4853 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
4854
4855 default:
4856 break;
4857 }
4858
4859 return NULL_TREE;
4860 }
4861
4862 /* If CODE with arguments ARG0 and ARG1 represents a single bit
4863 equality/inequality test, then return a simplified form of
4864 the test using shifts and logical operations. Otherwise return
4865 NULL. TYPE is the desired result type. */
4866
4867 tree
4868 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
4869 tree result_type)
4870 {
4871 /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
4872 operand 0. */
4873 if (code == TRUTH_NOT_EXPR)
4874 {
4875 code = TREE_CODE (arg0);
4876 if (code != NE_EXPR && code != EQ_EXPR)
4877 return NULL_TREE;
4878
4879 /* Extract the arguments of the EQ/NE. */
4880 arg1 = TREE_OPERAND (arg0, 1);
4881 arg0 = TREE_OPERAND (arg0, 0);
4882
4883 /* This requires us to invert the code. */
4884 code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
4885 }
4886
4887 /* If this is testing a single bit, we can optimize the test. */
4888 if ((code == NE_EXPR || code == EQ_EXPR)
4889 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
4890 && integer_pow2p (TREE_OPERAND (arg0, 1)))
4891 {
4892 tree inner = TREE_OPERAND (arg0, 0);
4893 tree type = TREE_TYPE (arg0);
4894 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
4895 enum machine_mode operand_mode = TYPE_MODE (type);
4896 int ops_unsigned;
4897 tree signed_type, unsigned_type;
4898 tree arg00;
4899
4900 /* If we have (A & C) != 0 where C is the sign bit of A, convert
4901 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
4902 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
4903 if (arg00 != NULL_TREE)
4904 {
4905 tree stype = (*lang_hooks.types.signed_type) (TREE_TYPE (arg00));
4906 return fold (build (code == EQ_EXPR ? GE_EXPR : LT_EXPR, result_type,
4907 convert (stype, arg00),
4908 convert (stype, integer_zero_node)));
4909 }
4910
4911 /* At this point, we know that arg0 is not testing the sign bit. */
4912 if (TYPE_PRECISION (type) - 1 == bitnum)
4913 abort ();
4914
4915 /* Otherwise we have (A & C) != 0 where C is a single bit,
4916 convert that into ((A >> C2) & 1), where C2 = log2(C).
4917 Similarly for (A & C) == 0. */
4918
4919 /* If INNER is a right shift of a constant and it plus BITNUM does
4920 not overflow, adjust BITNUM and INNER. */
4921 if (TREE_CODE (inner) == RSHIFT_EXPR
4922 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
4923 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
4924 && bitnum < TYPE_PRECISION (type)
4925 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
4926 bitnum - TYPE_PRECISION (type)))
4927 {
4928 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
4929 inner = TREE_OPERAND (inner, 0);
4930 }
4931
4932 /* If we are going to be able to omit the AND below, we must do our
4933 operations as unsigned. If we must use the AND, we have a choice.
4934 Normally unsigned is faster, but for some machines signed is. */
4935 #ifdef LOAD_EXTEND_OP
4936 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
4937 #else
4938 ops_unsigned = 1;
4939 #endif
4940
4941 signed_type = (*lang_hooks.types.type_for_mode) (operand_mode, 0);
4942 unsigned_type = (*lang_hooks.types.type_for_mode) (operand_mode, 1);
4943
4944 if (bitnum != 0)
4945 inner = build (RSHIFT_EXPR, ops_unsigned ? unsigned_type : signed_type,
4946 inner, size_int (bitnum));
4947
4948 if (code == EQ_EXPR)
4949 inner = build (BIT_XOR_EXPR, ops_unsigned ? unsigned_type : signed_type,
4950 inner, integer_one_node);
4951
4952 /* Put the AND last so it can combine with more things. */
4953 inner = build (BIT_AND_EXPR, ops_unsigned ? unsigned_type : signed_type,
4954 inner, integer_one_node);
4955
4956 /* Make sure to return the proper type. */
4957 if (TREE_TYPE (inner) != result_type)
4958 inner = convert (result_type, inner);
4959
4960 return inner;
4961 }
4962 return NULL_TREE;
4963 }
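
/* Illustrative sketch (standalone program, not part of this file):
   the single-bit rewrite above at the source level. With C a single
   bit and C2 = log2(C), (A & C) != 0 becomes (A >> C2) & 1, and the
   EQ_EXPR form picks up the extra XOR with 1. */

#include <assert.h>

int
main (void)
{
  unsigned a;
  const unsigned c = 1u << 5;   /* C is a single bit, so C2 = 5 */

  for (a = 0; a < 4096; a++)
    {
      assert (((a & c) != 0) == ((a >> 5) & 1));
      assert (((a & c) == 0) == (((a >> 5) & 1) ^ 1));
    }
  return 0;
}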
4964
4965 /* Perform constant folding and related simplification of EXPR.
4966 The related simplifications include x*1 => x, x*0 => 0, etc.,
4967 and application of the associative law.
4968 NOP_EXPR conversions may be removed freely (as long as we
4969 are careful not to change the C type of the overall expression)
4970 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
4971 but we can constant-fold them if they have constant operands. */
4972
4973 #ifdef ENABLE_FOLD_CHECKING
4974 # define fold(x) fold_1 (x)
4975 static tree fold_1 (tree);
4976 static
4977 #endif
4978 tree
4979 fold (tree expr)
4980 {
4981 tree t = expr, orig_t;
4982 tree t1 = NULL_TREE;
4983 tree tem;
4984 tree type = TREE_TYPE (expr);
4985 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4986 enum tree_code code = TREE_CODE (t);
4987 int kind = TREE_CODE_CLASS (code);
4988 int invert;
4989 /* WINS will be nonzero when the switch is done
4990 if all operands are constant. */
4991 int wins = 1;
4992
4993 /* Don't try to process an RTL_EXPR since its operands aren't trees.
4994 Likewise for a SAVE_EXPR that's already been evaluated. */
4995 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
4996 return t;
4997
4998 /* Return right away if a constant. */
4999 if (kind == 'c')
5000 return t;
5001
5002 #ifdef MAX_INTEGER_COMPUTATION_MODE
5003 check_max_integer_computation_mode (expr);
5004 #endif
5005 orig_t = t;
5006
5007 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
5008 {
5009 tree subop;
5010
5011 /* Special case for conversion ops that can have fixed point args. */
5012 arg0 = TREE_OPERAND (t, 0);
5013
5014 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
5015 if (arg0 != 0)
5016 STRIP_SIGN_NOPS (arg0);
5017
5018 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
5019 subop = TREE_REALPART (arg0);
5020 else
5021 subop = arg0;
5022
5023 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
5024 && TREE_CODE (subop) != REAL_CST)
5026 /* Note that TREE_CONSTANT isn't enough:
5027 static var addresses are constant but we can't
5028 do arithmetic on them. */
5029 wins = 0;
5030 }
5031 else if (IS_EXPR_CODE_CLASS (kind))
5032 {
5033 int len = first_rtl_op (code);
5034 int i;
5035 for (i = 0; i < len; i++)
5036 {
5037 tree op = TREE_OPERAND (t, i);
5038 tree subop;
5039
5040 if (op == 0)
5041 continue; /* Valid for CALL_EXPR, at least. */
5042
5043 if (kind == '<' || code == RSHIFT_EXPR)
5044 {
5045 /* Signedness matters here. Perhaps we can refine this
5046 later. */
5047 STRIP_SIGN_NOPS (op);
5048 }
5049 else
5050 /* Strip any conversions that don't change the mode. */
5051 STRIP_NOPS (op);
5052
5053 if (TREE_CODE (op) == COMPLEX_CST)
5054 subop = TREE_REALPART (op);
5055 else
5056 subop = op;
5057
5058 if (TREE_CODE (subop) != INTEGER_CST
5059 && TREE_CODE (subop) != REAL_CST)
5060 /* Note that TREE_CONSTANT isn't enough:
5061 static var addresses are constant but we can't
5062 do arithmetic on them. */
5063 wins = 0;
5064
5065 if (i == 0)
5066 arg0 = op;
5067 else if (i == 1)
5068 arg1 = op;
5069 }
5070 }
5071
5072 /* If this is a commutative operation, and ARG0 is a constant, move it
5073 to ARG1 to reduce the number of tests below. */
5074 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
5075 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
5076 || code == BIT_AND_EXPR)
5077 && ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) != INTEGER_CST)
5078 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) != REAL_CST)))
5079 {
5080 tem = arg0; arg0 = arg1; arg1 = tem;
5081
5082 if (t == orig_t)
5083 t = copy_node (t);
5084 TREE_OPERAND (t, 0) = arg0;
5085 TREE_OPERAND (t, 1) = arg1;
5086 }
5087
5088 /* Now WINS is set as described above,
5089 ARG0 is the first operand of EXPR,
5090 and ARG1 is the second operand (if it has more than one operand).
5091
5092 First check for cases where an arithmetic operation is applied to a
5093 compound, conditional, or comparison operation. Push the arithmetic
5094 operation inside the compound or conditional to see if any folding
5095 can then be done. Convert comparison to conditional for this purpose.
5096 This also optimizes non-constant cases that used to be done in
5097 expand_expr.
5098
5099 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
5100 one of the operands is a comparison and the other is a comparison, a
5101 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
5102 code below would make the expression more complex. Change it to a
5103 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
5104 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
5105
5106 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
5107 || code == EQ_EXPR || code == NE_EXPR)
5108 && ((truth_value_p (TREE_CODE (arg0))
5109 && (truth_value_p (TREE_CODE (arg1))
5110 || (TREE_CODE (arg1) == BIT_AND_EXPR
5111 && integer_onep (TREE_OPERAND (arg1, 1)))))
5112 || (truth_value_p (TREE_CODE (arg1))
5113 && (truth_value_p (TREE_CODE (arg0))
5114 || (TREE_CODE (arg0) == BIT_AND_EXPR
5115 && integer_onep (TREE_OPERAND (arg0, 1)))))))
5116 {
5117 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
5118 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
5119 : TRUTH_XOR_EXPR,
5120 type, arg0, arg1));
5121
5122 if (code == EQ_EXPR)
5123 t = invert_truthvalue (t);
5124
5125 return t;
5126 }
5127
5128 if (TREE_CODE_CLASS (code) == '1')
5129 {
5130 if (TREE_CODE (arg0) == COMPOUND_EXPR)
5131 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5132 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
5133 else if (TREE_CODE (arg0) == COND_EXPR)
5134 {
5135 tree arg01 = TREE_OPERAND (arg0, 1);
5136 tree arg02 = TREE_OPERAND (arg0, 2);
5137 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
5138 arg01 = fold (build1 (code, type, arg01));
5139 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
5140 arg02 = fold (build1 (code, type, arg02));
5141 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
5142 arg01, arg02));
5143
5144 /* If this was a conversion, and all we did was to move it
5145 inside the COND_EXPR, bring it back out. But leave it if
5146 it is a conversion from integer to integer and the
5147 result precision is no wider than a word since such a
5148 conversion is cheap and may be optimized away by combine,
5149 while it couldn't if it were outside the COND_EXPR. Then return
5150 so we don't get into an infinite recursion loop taking the
5151 conversion out and then back in. */
5152
5153 if ((code == NOP_EXPR || code == CONVERT_EXPR
5154 || code == NON_LVALUE_EXPR)
5155 && TREE_CODE (t) == COND_EXPR
5156 && TREE_CODE (TREE_OPERAND (t, 1)) == code
5157 && TREE_CODE (TREE_OPERAND (t, 2)) == code
5158 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1)))
5159 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 2)))
5160 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
5161 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
5162 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
5163 && (INTEGRAL_TYPE_P
5164 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))))
5165 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
5166 t = build1 (code, type,
5167 build (COND_EXPR,
5168 TREE_TYPE (TREE_OPERAND
5169 (TREE_OPERAND (t, 1), 0)),
5170 TREE_OPERAND (t, 0),
5171 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
5172 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
5173 return t;
5174 }
5175 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
5176 return fold (build (COND_EXPR, type, arg0,
5177 fold (build1 (code, type, integer_one_node)),
5178 fold (build1 (code, type, integer_zero_node))));
5179 }
5180 else if (TREE_CODE_CLASS (code) == '<'
5181 && TREE_CODE (arg0) == COMPOUND_EXPR)
5182 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5183 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5184 else if (TREE_CODE_CLASS (code) == '<'
5185 && TREE_CODE (arg1) == COMPOUND_EXPR)
5186 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5187 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
5188 else if (TREE_CODE_CLASS (code) == '2'
5189 || TREE_CODE_CLASS (code) == '<')
5190 {
5191 if (TREE_CODE (arg1) == COMPOUND_EXPR
5192 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg1, 0))
5193 && ! TREE_SIDE_EFFECTS (arg0))
5194 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5195 fold (build (code, type,
5196 arg0, TREE_OPERAND (arg1, 1))));
5197 else if ((TREE_CODE (arg1) == COND_EXPR
5198 || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
5199 && TREE_CODE_CLASS (code) != '<'))
5200 && (TREE_CODE (arg0) != COND_EXPR
5201 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5202 && (! TREE_SIDE_EFFECTS (arg0)
5203 || ((*lang_hooks.decls.global_bindings_p) () == 0
5204 && ! CONTAINS_PLACEHOLDER_P (arg0))))
5205 return
5206 fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
5207 /*cond_first_p=*/0);
5208 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
5209 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5210 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5211 else if ((TREE_CODE (arg0) == COND_EXPR
5212 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
5213 && TREE_CODE_CLASS (code) != '<'))
5214 && (TREE_CODE (arg1) != COND_EXPR
5215 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5216 && (! TREE_SIDE_EFFECTS (arg1)
5217 || ((*lang_hooks.decls.global_bindings_p) () == 0
5218 && ! CONTAINS_PLACEHOLDER_P (arg1))))
5219 return
5220 fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
5221 /*cond_first_p=*/1);
5222 }
5223
5224 switch (code)
5225 {
5226 case INTEGER_CST:
5227 case REAL_CST:
5228 case VECTOR_CST:
5229 case STRING_CST:
5230 case COMPLEX_CST:
5231 case CONSTRUCTOR:
5232 return t;
5233
5234 case CONST_DECL:
5235 return fold (DECL_INITIAL (t));
5236
5237 case NOP_EXPR:
5238 case FLOAT_EXPR:
5239 case CONVERT_EXPR:
5240 case FIX_TRUNC_EXPR:
5241 /* Other kinds of FIX are not handled properly by fold_convert. */
5242
5243 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
5244 return TREE_OPERAND (t, 0);
5245
5246 /* Handle cases of two conversions in a row. */
5247 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
5248 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
5249 {
5250 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5251 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
5252 tree final_type = TREE_TYPE (t);
5253 int inside_int = INTEGRAL_TYPE_P (inside_type);
5254 int inside_ptr = POINTER_TYPE_P (inside_type);
5255 int inside_float = FLOAT_TYPE_P (inside_type);
5256 unsigned int inside_prec = TYPE_PRECISION (inside_type);
5257 int inside_unsignedp = TREE_UNSIGNED (inside_type);
5258 int inter_int = INTEGRAL_TYPE_P (inter_type);
5259 int inter_ptr = POINTER_TYPE_P (inter_type);
5260 int inter_float = FLOAT_TYPE_P (inter_type);
5261 unsigned int inter_prec = TYPE_PRECISION (inter_type);
5262 int inter_unsignedp = TREE_UNSIGNED (inter_type);
5263 int final_int = INTEGRAL_TYPE_P (final_type);
5264 int final_ptr = POINTER_TYPE_P (final_type);
5265 int final_float = FLOAT_TYPE_P (final_type);
5266 unsigned int final_prec = TYPE_PRECISION (final_type);
5267 int final_unsignedp = TREE_UNSIGNED (final_type);
5268
5269 /* In addition to the cases of two conversions in a row
5270 handled below, if we are converting something to its own
5271 type via an object of identical or wider precision, neither
5272 conversion is needed. */
5273 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (final_type)
5274 && ((inter_int && final_int) || (inter_float && final_float))
5275 && inter_prec >= final_prec)
5276 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5277
5278 /* Likewise, if the intermediate and final types are either both
5279 float or both integer, we don't need the middle conversion if
5280 it is wider than the final type and doesn't change the signedness
5281 (for integers). Avoid this if the final type is a pointer
5282 since then we sometimes need the inner conversion. Likewise if
5283 the outer has a precision not equal to the size of its mode. */
5284 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
5285 || (inter_float && inside_float))
5286 && inter_prec >= inside_prec
5287 && (inter_float || inter_unsignedp == inside_unsignedp)
5288 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5289 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5290 && ! final_ptr)
5291 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5292
5293 /* If we have a sign-extension of a zero-extended value, we can
5294 replace that by a single zero-extension. */
5295 if (inside_int && inter_int && final_int
5296 && inside_prec < inter_prec && inter_prec < final_prec
5297 && inside_unsignedp && !inter_unsignedp)
5298 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5299
5300 /* Two conversions in a row are not needed unless:
5301 - some conversion is floating-point (overstrict for now), or
5302 - the intermediate type is narrower than both initial and
5303 final, or
5304 - the intermediate type and innermost type differ in signedness,
5305 and the outermost type is wider than the intermediate, or
5306 - the initial type is a pointer type and the precisions of the
5307 intermediate and final types differ, or
5308 - the final type is a pointer type and the precisions of the
5309 initial and intermediate types differ. */
5310 if (! inside_float && ! inter_float && ! final_float
5311 && (inter_prec > inside_prec || inter_prec > final_prec)
5312 && ! (inside_int && inter_int
5313 && inter_unsignedp != inside_unsignedp
5314 && inter_prec < final_prec)
5315 && ((inter_unsignedp && inter_prec > inside_prec)
5316 == (final_unsignedp && final_prec > inter_prec))
5317 && ! (inside_ptr && inter_prec != final_prec)
5318 && ! (final_ptr && inside_prec != inter_prec)
5319 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5320 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5321 && ! final_ptr)
5322 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5323 }
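/* Editor's sketch (hypothetical example): for a plain char c, the rules
   above drop the useless intermediate widening in

     (char) (int) c    =>    c

   while (int) (unsigned char) c keeps its inner conversion, since
   zero-extending a negative char changes the value.  */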
5324
5325 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
5326 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
5327 /* Detect assigning a bitfield. */
5328 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
5329 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
5330 {
5331 /* Don't leave an assignment inside a conversion
5332 unless assigning a bitfield. */
5333 tree prev = TREE_OPERAND (t, 0);
5334 if (t == orig_t)
5335 t = copy_node (t);
5336 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
5337 /* First do the assignment, then return converted constant. */
5338 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
5339 TREE_USED (t) = 1;
5340 return t;
5341 }
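/* Editor's sketch (hypothetical example): for int x,

     (long) (x = 5)    =>    (x = 5, (long) 5)

   the store still happens, but the converted constant can now fold.  */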
5342
5343 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
5344 constant (if x has a signed type, the sign bit cannot be set
5345 in c). This folds extension into the BIT_AND_EXPR. */
5346 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
5347 && TREE_CODE (TREE_TYPE (t)) != BOOLEAN_TYPE
5348 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
5349 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
5350 {
5351 tree and = TREE_OPERAND (t, 0);
5352 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
5353 int change = 0;
5354
5355 if (TREE_UNSIGNED (TREE_TYPE (and))
5356 || (TYPE_PRECISION (TREE_TYPE (t))
5357 <= TYPE_PRECISION (TREE_TYPE (and))))
5358 change = 1;
5359 else if (TYPE_PRECISION (TREE_TYPE (and1))
5360 <= HOST_BITS_PER_WIDE_INT
5361 && host_integerp (and1, 1))
5362 {
5363 unsigned HOST_WIDE_INT cst;
5364
5365 cst = tree_low_cst (and1, 1);
5366 cst &= (HOST_WIDE_INT) -1
5367 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
5368 change = (cst == 0);
5369 #ifdef LOAD_EXTEND_OP
5370 if (change
5371 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
5372 == ZERO_EXTEND))
5373 {
5374 tree uns = (*lang_hooks.types.unsigned_type) (TREE_TYPE (and0));
5375 and0 = convert (uns, and0);
5376 and1 = convert (uns, and1);
5377 }
5378 #endif
5379 }
5380 if (change)
5381 return fold (build (BIT_AND_EXPR, TREE_TYPE (t),
5382 convert (TREE_TYPE (t), and0),
5383 convert (TREE_TYPE (t), and1)));
5384 }
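/* Editor's sketch (hypothetical example): if the BIT_AND_EXPR is computed
   in unsigned short,

     (int) (x & 0x00ff)    =>    (int) x & 0x00ff

   so the widening conversion can combine with the mask.  */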
5385
5386 if (!wins)
5387 {
5388 if (TREE_CONSTANT (t) != TREE_CONSTANT (arg0))
5389 {
5390 if (t == orig_t)
5391 t = copy_node (t);
5392 TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
5393 }
5394 return t;
5395 }
5396 return fold_convert (t, arg0);
5397
5398 case VIEW_CONVERT_EXPR:
5399 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
5400 return build1 (VIEW_CONVERT_EXPR, type,
5401 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5402 return t;
5403
5404 case COMPONENT_REF:
5405 if (TREE_CODE (arg0) == CONSTRUCTOR
5406 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
5407 {
5408 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
5409 if (m)
5410 t = TREE_VALUE (m);
5411 }
5412 return t;
5413
5414 case RANGE_EXPR:
5415 if (TREE_CONSTANT (t) != wins)
5416 {
5417 if (t == orig_t)
5418 t = copy_node (t);
5419 TREE_CONSTANT (t) = wins;
5420 }
5421 return t;
5422
5423 case NEGATE_EXPR:
5424 if (wins)
5425 {
5426 if (TREE_CODE (arg0) == INTEGER_CST)
5427 {
5428 unsigned HOST_WIDE_INT low;
5429 HOST_WIDE_INT high;
5430 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5431 TREE_INT_CST_HIGH (arg0),
5432 &low, &high);
5433 t = build_int_2 (low, high);
5434 TREE_TYPE (t) = type;
5435 TREE_OVERFLOW (t)
5436 = (TREE_OVERFLOW (arg0)
5437 | force_fit_type (t, overflow && !TREE_UNSIGNED (type)));
5438 TREE_CONSTANT_OVERFLOW (t)
5439 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5440 }
5441 else if (TREE_CODE (arg0) == REAL_CST)
5442 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5443 }
5444 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5445 return TREE_OPERAND (arg0, 0);
5446 /* Convert -((double)float) into (double)(-float). */
5447 else if (TREE_CODE (arg0) == NOP_EXPR
5448 && TREE_CODE (type) == REAL_TYPE)
5449 {
5450 tree targ0 = strip_float_extensions (arg0);
5451 if (targ0 != arg0)
5452 return convert (type,
5453 build1 (NEGATE_EXPR, TREE_TYPE (targ0), targ0));
5454 }
5455
5456 /* Convert - (a - b) to (b - a) for non-floating-point. */
5457 else if (TREE_CODE (arg0) == MINUS_EXPR
5458 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
5459 return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
5460 TREE_OPERAND (arg0, 0));
5461
5462 /* Convert -f(x) into f(-x) where f is sin, tan or atan. */
5463 switch (builtin_mathfn_code (arg0))
5464 {
5465 case BUILT_IN_SIN:
5466 case BUILT_IN_SINF:
5467 case BUILT_IN_SINL:
5468 case BUILT_IN_TAN:
5469 case BUILT_IN_TANF:
5470 case BUILT_IN_TANL:
5471 case BUILT_IN_ATAN:
5472 case BUILT_IN_ATANF:
5473 case BUILT_IN_ATANL:
5474 if (negate_expr_p (TREE_VALUE (TREE_OPERAND (arg0, 1))))
5475 {
5476 tree fndecl, arg, arglist;
5477
5478 fndecl = get_callee_fndecl (arg0);
5479 arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5480 arg = fold (build1 (NEGATE_EXPR, type, arg));
5481 arglist = build_tree_list (NULL_TREE, arg);
5482 return build_function_call_expr (fndecl, arglist);
5483 }
5484 break;
5485
5486 default:
5487 break;
5488 }
5489 return t;
5490
5491 case ABS_EXPR:
5492 if (wins)
5493 {
5494 if (TREE_CODE (arg0) == INTEGER_CST)
5495 {
5496 /* If the value is unsigned, then the absolute value is
5497 the same as the ordinary value. */
5498 if (TREE_UNSIGNED (type))
5499 return arg0;
5500 /* Similarly, if the value is non-negative. */
5501 else if (INT_CST_LT (integer_minus_one_node, arg0))
5502 return arg0;
5503 /* If the value is negative, then the absolute value is
5504 its negation. */
5505 else
5506 {
5507 unsigned HOST_WIDE_INT low;
5508 HOST_WIDE_INT high;
5509 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5510 TREE_INT_CST_HIGH (arg0),
5511 &low, &high);
5512 t = build_int_2 (low, high);
5513 TREE_TYPE (t) = type;
5514 TREE_OVERFLOW (t)
5515 = (TREE_OVERFLOW (arg0)
5516 | force_fit_type (t, overflow));
5517 TREE_CONSTANT_OVERFLOW (t)
5518 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5519 }
5520 }
5521 else if (TREE_CODE (arg0) == REAL_CST)
5522 {
5523 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
5524 t = build_real (type,
5525 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5526 }
5527 }
5528 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5529 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
5530 /* Convert fabs((double)float) into (double)fabsf(float). */
5531 else if (TREE_CODE (arg0) == NOP_EXPR
5532 && TREE_CODE (type) == REAL_TYPE)
5533 {
5534 tree targ0 = strip_float_extensions (arg0);
5535 if (targ0 != arg0)
5536 return convert (type, fold (build1 (ABS_EXPR, TREE_TYPE (targ0),
5537 targ0)));
5538 }
5539 else if (tree_expr_nonnegative_p (arg0))
5540 return arg0;
5541 return t;
5542
5543 case CONJ_EXPR:
5544 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5545 return convert (type, arg0);
5546 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5547 return build (COMPLEX_EXPR, type,
5548 TREE_OPERAND (arg0, 0),
5549 negate_expr (TREE_OPERAND (arg0, 1)));
5550 else if (TREE_CODE (arg0) == COMPLEX_CST)
5551 return build_complex (type, TREE_REALPART (arg0),
5552 negate_expr (TREE_IMAGPART (arg0)));
5553 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5554 return fold (build (TREE_CODE (arg0), type,
5555 fold (build1 (CONJ_EXPR, type,
5556 TREE_OPERAND (arg0, 0))),
5557 fold (build1 (CONJ_EXPR,
5558 type, TREE_OPERAND (arg0, 1)))));
5559 else if (TREE_CODE (arg0) == CONJ_EXPR)
5560 return TREE_OPERAND (arg0, 0);
5561 return t;
5562
5563 case BIT_NOT_EXPR:
5564 if (wins)
5565 {
5566 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
5567 ~ TREE_INT_CST_HIGH (arg0));
5568 TREE_TYPE (t) = type;
5569 force_fit_type (t, 0);
5570 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
5571 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
5572 }
5573 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
5574 return TREE_OPERAND (arg0, 0);
5575 return t;
5576
5577 case PLUS_EXPR:
5578 /* A + (-B) -> A - B */
5579 if (TREE_CODE (arg1) == NEGATE_EXPR)
5580 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5581 /* (-A) + B -> B - A */
5582 if (TREE_CODE (arg0) == NEGATE_EXPR)
5583 return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
5584 else if (! FLOAT_TYPE_P (type))
5585 {
5586 if (integer_zerop (arg1))
5587 return non_lvalue (convert (type, arg0));
5588
5589 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
5590 with a constant, and the two constants have no bits in common,
5591 we should treat this as a BIT_IOR_EXPR since this may produce more
5592 simplifications. */
5593 if (TREE_CODE (arg0) == BIT_AND_EXPR
5594 && TREE_CODE (arg1) == BIT_AND_EXPR
5595 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5596 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5597 && integer_zerop (const_binop (BIT_AND_EXPR,
5598 TREE_OPERAND (arg0, 1),
5599 TREE_OPERAND (arg1, 1), 0)))
5600 {
5601 code = BIT_IOR_EXPR;
5602 goto bit_ior;
5603 }
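/* Editor's sketch (hypothetical example): the masks below share no bits, so

     (x & 0xf0) + (y & 0x0f)    =>    (x & 0xf0) | (y & 0x0f)

   and the BIT_IOR_EXPR form may simplify further.  */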
5604
5605 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
5606 (plus (plus (mult) (mult)) (foo)) so that we can
5607 take advantage of the factoring cases below. */
5608 if ((TREE_CODE (arg0) == PLUS_EXPR
5609 && TREE_CODE (arg1) == MULT_EXPR)
5610 || (TREE_CODE (arg1) == PLUS_EXPR
5611 && TREE_CODE (arg0) == MULT_EXPR))
5612 {
5613 tree parg0, parg1, parg, marg;
5614
5615 if (TREE_CODE (arg0) == PLUS_EXPR)
5616 parg = arg0, marg = arg1;
5617 else
5618 parg = arg1, marg = arg0;
5619 parg0 = TREE_OPERAND (parg, 0);
5620 parg1 = TREE_OPERAND (parg, 1);
5621 STRIP_NOPS (parg0);
5622 STRIP_NOPS (parg1);
5623
5624 if (TREE_CODE (parg0) == MULT_EXPR
5625 && TREE_CODE (parg1) != MULT_EXPR)
5626 return fold (build (PLUS_EXPR, type,
5627 fold (build (PLUS_EXPR, type,
5628 convert (type, parg0),
5629 convert (type, marg))),
5630 convert (type, parg1)));
5631 if (TREE_CODE (parg0) != MULT_EXPR
5632 && TREE_CODE (parg1) == MULT_EXPR)
5633 return fold (build (PLUS_EXPR, type,
5634 fold (build (PLUS_EXPR, type,
5635 convert (type, parg1),
5636 convert (type, marg))),
5637 convert (type, parg0)));
5638 }
5639
5640 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
5641 {
5642 tree arg00, arg01, arg10, arg11;
5643 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
5644
5645 /* (A * C) + (B * C) -> (A+B) * C.
5646 We are most concerned about the case where C is a constant,
5647 but other combinations show up during loop reduction. Since
5648 it is not difficult, try all four possibilities. */
5649
5650 arg00 = TREE_OPERAND (arg0, 0);
5651 arg01 = TREE_OPERAND (arg0, 1);
5652 arg10 = TREE_OPERAND (arg1, 0);
5653 arg11 = TREE_OPERAND (arg1, 1);
5654 same = NULL_TREE;
5655
5656 if (operand_equal_p (arg01, arg11, 0))
5657 same = arg01, alt0 = arg00, alt1 = arg10;
5658 else if (operand_equal_p (arg00, arg10, 0))
5659 same = arg00, alt0 = arg01, alt1 = arg11;
5660 else if (operand_equal_p (arg00, arg11, 0))
5661 same = arg00, alt0 = arg01, alt1 = arg10;
5662 else if (operand_equal_p (arg01, arg10, 0))
5663 same = arg01, alt0 = arg00, alt1 = arg11;
5664
5665 /* No identical multiplicands; see if we can find a common
5666 power-of-two factor in non-power-of-two multiplies. This
5667 can help in multi-dimensional array access. */
5668 else if (TREE_CODE (arg01) == INTEGER_CST
5669 && TREE_CODE (arg11) == INTEGER_CST
5670 && TREE_INT_CST_HIGH (arg01) == 0
5671 && TREE_INT_CST_HIGH (arg11) == 0)
5672 {
5673 HOST_WIDE_INT int01, int11, tmp;
5674 int01 = TREE_INT_CST_LOW (arg01);
5675 int11 = TREE_INT_CST_LOW (arg11);
5676
5677 /* Move min of absolute values to int11. */
5678 if ((int01 >= 0 ? int01 : -int01)
5679 < (int11 >= 0 ? int11 : -int11))
5680 {
5681 tmp = int01, int01 = int11, int11 = tmp;
5682 alt0 = arg00, arg00 = arg10, arg10 = alt0;
5683 alt0 = arg01, arg01 = arg11, arg11 = alt0;
5684 }
5685
5686 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
5687 {
5688 alt0 = fold (build (MULT_EXPR, type, arg00,
5689 build_int_2 (int01 / int11, 0)));
5690 alt1 = arg10;
5691 same = arg11;
5692 }
5693 }
5694
5695 if (same)
5696 return fold (build (MULT_EXPR, type,
5697 fold (build (PLUS_EXPR, type, alt0, alt1)),
5698 same));
5699 }
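/* Editor's sketch (hypothetical examples): the factoring above gives

     a * c + b * c     =>    (a + b) * c
     i * 12 + j * 4    =>    (i * 3 + j) * 4

   the second one via the common power-of-two factor 4.  */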
5700 }
5701 else
5702 {
5703 /* See if ARG1 is zero and X + ARG1 reduces to X. */
5704 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
5705 return non_lvalue (convert (type, arg0));
5706
5707 /* Likewise if the operands are reversed. */
5708 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
5709 return non_lvalue (convert (type, arg1));
5710
5711 /* Convert x+x into x*2.0. */
5712 if (operand_equal_p (arg0, arg1, 0)
5713 && SCALAR_FLOAT_TYPE_P (type))
5714 return fold (build (MULT_EXPR, type, arg0,
5715 build_real (type, dconst2)));
5716
5717 /* Convert x*c+x into x*(c+1). */
5718 if (flag_unsafe_math_optimizations
5719 && TREE_CODE (arg0) == MULT_EXPR
5720 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
5721 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
5722 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
5723 {
5724 REAL_VALUE_TYPE c;
5725
5726 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
5727 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
5728 return fold (build (MULT_EXPR, type, arg1,
5729 build_real (type, c)));
5730 }
5731
5732 /* Convert x+x*c into x*(c+1). */
5733 if (flag_unsafe_math_optimizations
5734 && TREE_CODE (arg1) == MULT_EXPR
5735 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
5736 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
5737 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
5738 {
5739 REAL_VALUE_TYPE c;
5740
5741 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
5742 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
5743 return fold (build (MULT_EXPR, type, arg0,
5744 build_real (type, c)));
5745 }
5746
5747 /* Convert x*c1+x*c2 into x*(c1+c2). */
5748 if (flag_unsafe_math_optimizations
5749 && TREE_CODE (arg0) == MULT_EXPR
5750 && TREE_CODE (arg1) == MULT_EXPR
5751 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
5752 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
5753 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
5754 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
5755 && operand_equal_p (TREE_OPERAND (arg0, 0),
5756 TREE_OPERAND (arg1, 0), 0))
5757 {
5758 REAL_VALUE_TYPE c1, c2;
5759
5760 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
5761 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
5762 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
5763 return fold (build (MULT_EXPR, type,
5764 TREE_OPERAND (arg0, 0),
5765 build_real (type, c1)));
5766 }
5767 }
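/* Editor's sketch (hypothetical examples): for floats this branch gives

     x + x                =>    x * 2.0   (always safe)
     x * 2.0 + x * 3.0    =>    x * 5.0   (needs -funsafe-math-optimizations)  */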
5768
5769 bit_rotate:
5770 /* (A << C1) + (A >> C2) is a rotate of A by C1 bits
5771 if A is unsigned and C1+C2 is the size of A. */
5772 /* (A << B) + (A >> (Z - B)) is a rotate of A by B bits
5773 if A is unsigned and Z is the size of A. */
5774 {
5775 enum tree_code code0, code1;
5776 code0 = TREE_CODE (arg0);
5777 code1 = TREE_CODE (arg1);
5778 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
5779 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
5780 && operand_equal_p (TREE_OPERAND (arg0, 0),
5781 TREE_OPERAND (arg1, 0), 0)
5782 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5783 {
5784 tree tree01, tree11;
5785 enum tree_code code01, code11;
5786
5787 tree01 = TREE_OPERAND (arg0, 1);
5788 tree11 = TREE_OPERAND (arg1, 1);
5789 STRIP_NOPS (tree01);
5790 STRIP_NOPS (tree11);
5791 code01 = TREE_CODE (tree01);
5792 code11 = TREE_CODE (tree11);
5793 if (code01 == INTEGER_CST
5794 && code11 == INTEGER_CST
5795 && TREE_INT_CST_HIGH (tree01) == 0
5796 && TREE_INT_CST_HIGH (tree11) == 0
5797 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
5798 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
5799 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
5800 code0 == LSHIFT_EXPR ? tree01 : tree11);
5801 else if (code11 == MINUS_EXPR)
5802 {
5803 tree tree110, tree111;
5804 tree110 = TREE_OPERAND (tree11, 0);
5805 tree111 = TREE_OPERAND (tree11, 1);
5806 STRIP_NOPS (tree110);
5807 STRIP_NOPS (tree111);
5808 if (TREE_CODE (tree110) == INTEGER_CST
5809 && 0 == compare_tree_int (tree110,
5810 TYPE_PRECISION
5811 (TREE_TYPE (TREE_OPERAND
5812 (arg0, 0))))
5813 && operand_equal_p (tree01, tree111, 0))
5814 return build ((code0 == LSHIFT_EXPR
5815 ? LROTATE_EXPR
5816 : RROTATE_EXPR),
5817 type, TREE_OPERAND (arg0, 0), tree01);
5818 }
5819 else if (code01 == MINUS_EXPR)
5820 {
5821 tree tree010, tree011;
5822 tree010 = TREE_OPERAND (tree01, 0);
5823 tree011 = TREE_OPERAND (tree01, 1);
5824 STRIP_NOPS (tree010);
5825 STRIP_NOPS (tree011);
5826 if (TREE_CODE (tree010) == INTEGER_CST
5827 && 0 == compare_tree_int (tree010,
5828 TYPE_PRECISION
5829 (TREE_TYPE (TREE_OPERAND
5830 (arg0, 0))))
5831 && operand_equal_p (tree11, tree011, 0))
5832 return build ((code0 != LSHIFT_EXPR
5833 ? LROTATE_EXPR
5834 : RROTATE_EXPR),
5835 type, TREE_OPERAND (arg0, 0), tree11);
5836 }
5837 }
5838 }
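/* Editor's sketch (hypothetical examples, 32-bit unsigned a): the code
   above recognizes both rotate idioms

     (a << 5) + (a >> 27)          =>    rotate a left by 5
     (a << n) + (a >> (32 - n))    =>    rotate a left by n

   and rebuilds them as LROTATE_EXPR/RROTATE_EXPR.  */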
5839
5840 associate:
5841 /* In most languages, we can't associate operations on floats through
5842 parentheses. Rather than remember where the parentheses were, we
5843 don't associate floats at all, unless the user has specified
5844 -funsafe-math-optimizations. */
5845
5846 if (! wins
5847 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
5848 {
5849 tree var0, con0, lit0, minus_lit0;
5850 tree var1, con1, lit1, minus_lit1;
5851
5852 /* Split both trees into variables, constants, and literals. Then
5853 associate each group together, the constants with literals,
5854 then the result with variables. This increases the chances of
5855 literals being recombined later and of generating relocatable
5856 expressions for the sum of a constant and literal. */
5857 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
5858 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
5859 code == MINUS_EXPR);
5860
5861 /* Only do something if we found more than two objects. Otherwise,
5862 nothing has changed and we risk infinite recursion. */
5863 if (2 < ((var0 != 0) + (var1 != 0)
5864 + (con0 != 0) + (con1 != 0)
5865 + (lit0 != 0) + (lit1 != 0)
5866 + (minus_lit0 != 0) + (minus_lit1 != 0)))
5867 {
5868 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
5869 if (code == MINUS_EXPR)
5870 code = PLUS_EXPR;
5871
5872 var0 = associate_trees (var0, var1, code, type);
5873 con0 = associate_trees (con0, con1, code, type);
5874 lit0 = associate_trees (lit0, lit1, code, type);
5875 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
5876
5877 /* Preserve the MINUS_EXPR if the negative part of the literal is
5878 greater than the positive part. Otherwise, the multiplicative
5879 folding code (i.e. extract_muldiv) may be fooled when
5880 unsigned constants are subtracted, as in the following
5881 example: ((X*2 + 4) - 8U)/2. */
5882 if (minus_lit0 && lit0)
5883 {
5884 if (TREE_CODE (lit0) == INTEGER_CST
5885 && TREE_CODE (minus_lit0) == INTEGER_CST
5886 && tree_int_cst_lt (lit0, minus_lit0))
5887 {
5888 minus_lit0 = associate_trees (minus_lit0, lit0,
5889 MINUS_EXPR, type);
5890 lit0 = 0;
5891 }
5892 else
5893 {
5894 lit0 = associate_trees (lit0, minus_lit0,
5895 MINUS_EXPR, type);
5896 minus_lit0 = 0;
5897 }
5898 }
5899 if (minus_lit0)
5900 {
5901 if (con0 == 0)
5902 return convert (type, associate_trees (var0, minus_lit0,
5903 MINUS_EXPR, type));
5904 else
5905 {
5906 con0 = associate_trees (con0, minus_lit0,
5907 MINUS_EXPR, type);
5908 return convert (type, associate_trees (var0, con0,
5909 PLUS_EXPR, type));
5910 }
5911 }
5912
5913 con0 = associate_trees (con0, lit0, code, type);
5914 return convert (type, associate_trees (var0, con0, code, type));
5915 }
5916 }
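/* Editor's sketch (hypothetical example): splitting and regrouping lets

     (x + 1) + (y + 2)    =>    (x + y) + 3

   so literals combine even when they start in different subtrees.  */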
5917
5918 binary:
5919 if (wins)
5920 t1 = const_binop (code, arg0, arg1, 0);
5921 if (t1 != NULL_TREE)
5922 {
5923 /* The return value should always have
5924 the same type as the original expression. */
5925 if (TREE_TYPE (t1) != TREE_TYPE (t))
5926 t1 = convert (TREE_TYPE (t), t1);
5927
5928 return t1;
5929 }
5930 return t;
5931
5932 case MINUS_EXPR:
5933 /* A - (-B) -> A + B */
5934 if (TREE_CODE (arg1) == NEGATE_EXPR)
5935 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5936 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
5937 if (TREE_CODE (arg0) == NEGATE_EXPR
5938 && (FLOAT_TYPE_P (type)
5939 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
5940 && negate_expr_p (arg1)
5941 && (! TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
5942 && (! TREE_SIDE_EFFECTS (arg1) || TREE_CONSTANT (arg0)))
5943 return fold (build (MINUS_EXPR, type, negate_expr (arg1),
5944 TREE_OPERAND (arg0, 0)));
5945
5946 if (! FLOAT_TYPE_P (type))
5947 {
5948 if (! wins && integer_zerop (arg0))
5949 return negate_expr (convert (type, arg1));
5950 if (integer_zerop (arg1))
5951 return non_lvalue (convert (type, arg0));
5952
5953 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
5954 about the case where C is a constant, just try one of the
5955 four possibilities. */
5956
5957 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
5958 && operand_equal_p (TREE_OPERAND (arg0, 1),
5959 TREE_OPERAND (arg1, 1), 0))
5960 return fold (build (MULT_EXPR, type,
5961 fold (build (MINUS_EXPR, type,
5962 TREE_OPERAND (arg0, 0),
5963 TREE_OPERAND (arg1, 0))),
5964 TREE_OPERAND (arg0, 1)));
5965
5966 /* Fold A - (A & B) into ~B & A. */
5967 if (!TREE_SIDE_EFFECTS (arg0)
5968 && TREE_CODE (arg1) == BIT_AND_EXPR)
5969 {
5970 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
5971 return fold (build (BIT_AND_EXPR, type,
5972 fold (build1 (BIT_NOT_EXPR, type,
5973 TREE_OPERAND (arg1, 0))),
5974 arg0));
5975 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
5976 return fold (build (BIT_AND_EXPR, type,
5977 fold (build1 (BIT_NOT_EXPR, type,
5978 TREE_OPERAND (arg1, 1))),
5979 arg0));
5980 }
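/* Editor's sketch (hypothetical example):

     a - (a & b)    =>    ~b & a

   both keep exactly the bits of a that are not set in b.  */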
5981 }
5982
5983 /* See if ARG1 is zero and X - ARG1 reduces to X. */
5984 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
5985 return non_lvalue (convert (type, arg0));
5986
5987 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
5988 ARG0 is zero and X + ARG0 reduces to X, since that would mean
5989 (-ARG1 + ARG0) reduces to -ARG1. */
5990 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
5991 return negate_expr (convert (type, arg1));
5992
5993 /* Fold &x - &x. This can happen from &x.foo - &x.
5994 This is unsafe for certain floats even in non-IEEE formats.
5995 In IEEE, it is unsafe because it gives the wrong result for NaNs.
5996 Also note that operand_equal_p is always false if an operand
5997 is volatile. */
5998
5999 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
6000 && operand_equal_p (arg0, arg1, 0))
6001 return convert (type, integer_zero_node);
6002
6003 goto associate;
6004
6005 case MULT_EXPR:
6006 /* (-A) * (-B) -> A * B */
6007 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6008 return fold (build (MULT_EXPR, type,
6009 TREE_OPERAND (arg0, 0),
6010 negate_expr (arg1)));
6011 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6012 return fold (build (MULT_EXPR, type,
6013 negate_expr (arg0),
6014 TREE_OPERAND (arg1, 0)));
6015
6016 if (! FLOAT_TYPE_P (type))
6017 {
6018 if (integer_zerop (arg1))
6019 return omit_one_operand (type, arg1, arg0);
6020 if (integer_onep (arg1))
6021 return non_lvalue (convert (type, arg0));
6022
6023 /* (a * (1 << b)) is (a << b) */
6024 if (TREE_CODE (arg1) == LSHIFT_EXPR
6025 && integer_onep (TREE_OPERAND (arg1, 0)))
6026 return fold (build (LSHIFT_EXPR, type, arg0,
6027 TREE_OPERAND (arg1, 1)));
6028 if (TREE_CODE (arg0) == LSHIFT_EXPR
6029 && integer_onep (TREE_OPERAND (arg0, 0)))
6030 return fold (build (LSHIFT_EXPR, type, arg1,
6031 TREE_OPERAND (arg0, 1)));
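/* Editor's sketch (hypothetical example):

     a * (1 << b)    =>    a << b

   both scale a by 2**b in integer arithmetic.  */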
6032
6033 if (TREE_CODE (arg1) == INTEGER_CST
6034 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
6035 convert (type, arg1),
6036 code, NULL_TREE)))
6037 return convert (type, tem);
6038
6039 }
6040 else
6041 {
6042 /* Maybe fold x * 0 to 0. The expressions aren't the same
6043 when x is NaN, since x * 0 is also NaN. Nor are they the
6044 same in modes with signed zeros, since multiplying a
6045 negative value by 0 gives -0, not +0. */
6046 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
6047 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
6048 && real_zerop (arg1))
6049 return omit_one_operand (type, arg1, arg0);
6050 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
6051 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6052 && real_onep (arg1))
6053 return non_lvalue (convert (type, arg0));
6054
6055 /* Transform x * -1.0 into -x. */
6056 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6057 && real_minus_onep (arg1))
6058 return fold (build1 (NEGATE_EXPR, type, arg0));
6059
6060 /* Convert (C1/X)*C2 into (C1*C2)/X. */
6061 if (flag_unsafe_math_optimizations
6062 && TREE_CODE (arg0) == RDIV_EXPR
6063 && TREE_CODE (arg1) == REAL_CST
6064 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
6065 {
6066 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
6067 arg1, 0);
6068 if (tem)
6069 return fold (build (RDIV_EXPR, type, tem,
6070 TREE_OPERAND (arg0, 1)));
6071 }
6072
6073 if (flag_unsafe_math_optimizations)
6074 {
6075 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6076 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6077
6078 /* Optimizations of sqrt(...)*sqrt(...). */
6079 if ((fcode0 == BUILT_IN_SQRT && fcode1 == BUILT_IN_SQRT)
6080 || (fcode0 == BUILT_IN_SQRTF && fcode1 == BUILT_IN_SQRTF)
6081 || (fcode0 == BUILT_IN_SQRTL && fcode1 == BUILT_IN_SQRTL))
6082 {
6083 tree sqrtfn, arg, arglist;
6084 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6085 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6086
6087 /* Optimize sqrt(x)*sqrt(x) as x. */
6088 if (operand_equal_p (arg00, arg10, 0)
6089 && ! HONOR_SNANS (TYPE_MODE (type)))
6090 return arg00;
6091
6092 /* Optimize sqrt(x)*sqrt(y) as sqrt(x*y). */
6093 sqrtfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6094 arg = fold (build (MULT_EXPR, type, arg00, arg10));
6095 arglist = build_tree_list (NULL_TREE, arg);
6096 return build_function_call_expr (sqrtfn, arglist);
6097 }
6098
6099 /* Optimize exp(x)*exp(y) as exp(x+y). */
6100 if ((fcode0 == BUILT_IN_EXP && fcode1 == BUILT_IN_EXP)
6101 || (fcode0 == BUILT_IN_EXPF && fcode1 == BUILT_IN_EXPF)
6102 || (fcode0 == BUILT_IN_EXPL && fcode1 == BUILT_IN_EXPL))
6103 {
6104 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6105 tree arg = build (PLUS_EXPR, type,
6106 TREE_VALUE (TREE_OPERAND (arg0, 1)),
6107 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6108 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6109 return build_function_call_expr (expfn, arglist);
6110 }
6111
6112 /* Optimizations of pow(...)*pow(...). */
6113 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
6114 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
6115 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
6116 {
6117 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6118 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6119 1)));
6120 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6121 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6122 1)));
6123
6124 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
6125 if (operand_equal_p (arg01, arg11, 0))
6126 {
6127 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6128 tree arg = build (MULT_EXPR, type, arg00, arg10);
6129 tree arglist = tree_cons (NULL_TREE, fold (arg),
6130 build_tree_list (NULL_TREE,
6131 arg01));
6132 return build_function_call_expr (powfn, arglist);
6133 }
6134
6135 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
6136 if (operand_equal_p (arg00, arg10, 0))
6137 {
6138 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6139 tree arg = fold (build (PLUS_EXPR, type, arg01, arg11));
6140 tree arglist = tree_cons (NULL_TREE, arg00,
6141 build_tree_list (NULL_TREE,
6142 arg));
6143 return build_function_call_expr (powfn, arglist);
6144 }
6145 }
6146
6147 /* Optimize tan(x)*cos(x) as sin(x). */
6148 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
6149 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
6150 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
6151 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
6152 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
6153 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
6154 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6155 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6156 {
6157 tree sinfn;
6158
6159 switch (fcode0)
6160 {
6161 case BUILT_IN_TAN:
6162 case BUILT_IN_COS:
6163 sinfn = implicit_built_in_decls[BUILT_IN_SIN];
6164 break;
6165 case BUILT_IN_TANF:
6166 case BUILT_IN_COSF:
6167 sinfn = implicit_built_in_decls[BUILT_IN_SINF];
6168 break;
6169 case BUILT_IN_TANL:
6170 case BUILT_IN_COSL:
6171 sinfn = implicit_built_in_decls[BUILT_IN_SINL];
6172 break;
6173 default:
6174 sinfn = NULL_TREE;
6175 }
6176
6177 if (sinfn != NULL_TREE)
6178 return build_function_call_expr (sinfn,
6179 TREE_OPERAND (arg0, 1));
6180 }
6181
6182 /* Optimize x*pow(x,c) as pow(x,c+1). */
6183 if (fcode1 == BUILT_IN_POW
6184 || fcode1 == BUILT_IN_POWF
6185 || fcode1 == BUILT_IN_POWL)
6186 {
6187 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6188 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6189 1)));
6190 if (TREE_CODE (arg11) == REAL_CST
6191 && ! TREE_CONSTANT_OVERFLOW (arg11)
6192 && operand_equal_p (arg0, arg10, 0))
6193 {
6194 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6195 REAL_VALUE_TYPE c;
6196 tree arg, arglist;
6197
6198 c = TREE_REAL_CST (arg11);
6199 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6200 arg = build_real (type, c);
6201 arglist = build_tree_list (NULL_TREE, arg);
6202 arglist = tree_cons (NULL_TREE, arg0, arglist);
6203 return build_function_call_expr (powfn, arglist);
6204 }
6205 }
6206
6207 /* Optimize pow(x,c)*x as pow(x,c+1). */
6208 if (fcode0 == BUILT_IN_POW
6209 || fcode0 == BUILT_IN_POWF
6210 || fcode0 == BUILT_IN_POWL)
6211 {
6212 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6213 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6214 1)));
6215 if (TREE_CODE (arg01) == REAL_CST
6216 && ! TREE_CONSTANT_OVERFLOW (arg01)
6217 && operand_equal_p (arg1, arg00, 0))
6218 {
6219 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6220 REAL_VALUE_TYPE c;
6221 tree arg, arglist;
6222
6223 c = TREE_REAL_CST (arg01);
6224 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6225 arg = build_real (type, c);
6226 arglist = build_tree_list (NULL_TREE, arg);
6227 arglist = tree_cons (NULL_TREE, arg1, arglist);
6228 return build_function_call_expr (powfn, arglist);
6229 }
6230 }
6231
6232 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
6233 if (! optimize_size
6234 && operand_equal_p (arg0, arg1, 0))
6235 {
6236 tree powfn;
6237
6238 if (type == double_type_node)
6239 powfn = implicit_built_in_decls[BUILT_IN_POW];
6240 else if (type == float_type_node)
6241 powfn = implicit_built_in_decls[BUILT_IN_POWF];
6242 else if (type == long_double_type_node)
6243 powfn = implicit_built_in_decls[BUILT_IN_POWL];
6244 else
6245 powfn = NULL_TREE;
6246
6247 if (powfn)
6248 {
6249 tree arg = build_real (type, dconst2);
6250 tree arglist = build_tree_list (NULL_TREE, arg);
6251 arglist = tree_cons (NULL_TREE, arg0, arglist);
6252 return build_function_call_expr (powfn, arglist);
6253 }
6254 }
6255 }
6256 }
6257 goto associate;
6258
6259 case BIT_IOR_EXPR:
6260 bit_ior:
6261 if (integer_all_onesp (arg1))
6262 return omit_one_operand (type, arg1, arg0);
6263 if (integer_zerop (arg1))
6264 return non_lvalue (convert (type, arg0));
6265 t1 = distribute_bit_expr (code, type, arg0, arg1);
6266 if (t1 != NULL_TREE)
6267 return t1;
6268
6269 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
6270
6271 This results in more efficient code for machines without a NAND
6272 instruction. Combine will canonicalize to the first form
6273 which will allow use of NAND instructions provided by the
6274 backend if they exist. */
6275 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6276 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6277 {
6278 return fold (build1 (BIT_NOT_EXPR, type,
6279 build (BIT_AND_EXPR, type,
6280 TREE_OPERAND (arg0, 0),
6281 TREE_OPERAND (arg1, 0))));
6282 }
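/* Editor's sketch (hypothetical example): De Morgan in tree form,

     ~a | ~b    =>    ~(a & b)

   one BIT_NOT_EXPR instead of two.  */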
6283
6284 /* See if this can be simplified into a rotate first. If that
6285 is unsuccessful continue in the association code. */
6286 goto bit_rotate;
6287
6288 case BIT_XOR_EXPR:
6289 if (integer_zerop (arg1))
6290 return non_lvalue (convert (type, arg0));
6291 if (integer_all_onesp (arg1))
6292 return fold (build1 (BIT_NOT_EXPR, type, arg0));
6293
6294 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
6295 with a constant, and the two constants have no bits in common,
6296 we should treat this as a BIT_IOR_EXPR since this may produce more
6297 simplifications. */
6298 if (TREE_CODE (arg0) == BIT_AND_EXPR
6299 && TREE_CODE (arg1) == BIT_AND_EXPR
6300 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6301 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6302 && integer_zerop (const_binop (BIT_AND_EXPR,
6303 TREE_OPERAND (arg0, 1),
6304 TREE_OPERAND (arg1, 1), 0)))
6305 {
6306 code = BIT_IOR_EXPR;
6307 goto bit_ior;
6308 }
6309
6310 /* See if this can be simplified into a rotate first. If that
6311 is unsuccessful continue in the association code. */
6312 goto bit_rotate;
6313
6314 case BIT_AND_EXPR:
6315 if (integer_all_onesp (arg1))
6316 return non_lvalue (convert (type, arg0));
6317 if (integer_zerop (arg1))
6318 return omit_one_operand (type, arg1, arg0);
6319 t1 = distribute_bit_expr (code, type, arg0, arg1);
6320 if (t1 != NULL_TREE)
6321 return t1;
6322 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
6323 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
6324 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6325 {
6326 unsigned int prec
6327 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
6328
6329 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
6330 && (~TREE_INT_CST_LOW (arg1)
6331 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
6332 return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
6333 }
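/* Editor's sketch (hypothetical example): with unsigned char c,

     (int) c & 0377    =>    (int) c

   since the zero-extending conversion already clears the high bits.  */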
6334
6335 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
6336
6337 This results in more efficient code for machines without a NOR
6338 instruction. Combine will canonicalize to the first form
6339 which will allow use of NOR instructions provided by the
6340 backend if they exist. */
6341 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6342 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6343 {
6344 return fold (build1 (BIT_NOT_EXPR, type,
6345 build (BIT_IOR_EXPR, type,
6346 TREE_OPERAND (arg0, 0),
6347 TREE_OPERAND (arg1, 0))));
6348 }
6349
6350 goto associate;
6351
6352 case RDIV_EXPR:
6353 /* Don't touch a floating-point divide by zero unless the mode
6354 of the constant can represent infinity. */
6355 if (TREE_CODE (arg1) == REAL_CST
6356 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
6357 && real_zerop (arg1))
6358 return t;
6359
6360 /* (-A) / (-B) -> A / B */
6361 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6362 return fold (build (RDIV_EXPR, type,
6363 TREE_OPERAND (arg0, 0),
6364 negate_expr (arg1)));
6365 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6366 return fold (build (RDIV_EXPR, type,
6367 negate_expr (arg0),
6368 TREE_OPERAND (arg1, 0)));
6369
6370 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
6371 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6372 && real_onep (arg1))
6373 return non_lvalue (convert (type, arg0));
6374
6375 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
6376 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6377 && real_minus_onep (arg1))
6378 return non_lvalue (convert (type, negate_expr (arg0)));
6379
6380 /* If ARG1 is a constant, we can convert this to a multiply by the
6381 reciprocal. This does not have the same rounding properties,
6382 so only do this if -funsafe-math-optimizations. We can actually
6383 always safely do it if ARG1 is a power of two, but it's hard to
6384 tell if it is or not in a portable manner. */
6385 if (TREE_CODE (arg1) == REAL_CST)
6386 {
6387 if (flag_unsafe_math_optimizations
6388 && 0 != (tem = const_binop (code, build_real (type, dconst1),
6389 arg1, 0)))
6390 return fold (build (MULT_EXPR, type, arg0, tem));
6391 /* Find the reciprocal if optimizing and the result is exact. */
6392 if (optimize)
6393 {
6394 REAL_VALUE_TYPE r;
6395 r = TREE_REAL_CST (arg1);
6396 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
6397 {
6398 tem = build_real (type, r);
6399 return fold (build (MULT_EXPR, type, arg0, tem));
6400 }
6401 }
6402 }
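/* Editor's sketch (hypothetical example):

     x / 2.0    =>    x * 0.5

   allowed even without -funsafe-math-optimizations because the
   reciprocal of a power of two is exact.  */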
6403 /* Convert A/B/C to A/(B*C). */
6404 if (flag_unsafe_math_optimizations
6405 && TREE_CODE (arg0) == RDIV_EXPR)
6406 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
6407 fold (build (MULT_EXPR, type,
6408 TREE_OPERAND (arg0, 1), arg1))));
6409
6410 /* Convert A/(B/C) to (A/B)*C. */
6411 if (flag_unsafe_math_optimizations
6412 && TREE_CODE (arg1) == RDIV_EXPR)
6413 return fold (build (MULT_EXPR, type,
6414 fold (build (RDIV_EXPR, type, arg0,
6415 TREE_OPERAND (arg1, 0))),
6416 TREE_OPERAND (arg1, 1)));
6417
6418 /* Convert C1/(X*C2) into (C1/C2)/X. */
6419 if (flag_unsafe_math_optimizations
6420 && TREE_CODE (arg1) == MULT_EXPR
6421 && TREE_CODE (arg0) == REAL_CST
6422 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
6423 {
6424 tree tem = const_binop (RDIV_EXPR, arg0,
6425 TREE_OPERAND (arg1, 1), 0);
6426 if (tem)
6427 return fold (build (RDIV_EXPR, type, tem,
6428 TREE_OPERAND (arg1, 0)));
6429 }
6430
6431 if (flag_unsafe_math_optimizations)
6432 {
6433 enum built_in_function fcode = builtin_mathfn_code (arg1);
6434 /* Optimize x/exp(y) into x*exp(-y). */
6435 if (fcode == BUILT_IN_EXP
6436 || fcode == BUILT_IN_EXPF
6437 || fcode == BUILT_IN_EXPL)
6438 {
6439 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6440 tree arg = build1 (NEGATE_EXPR, type,
6441 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6442 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6443 arg1 = build_function_call_expr (expfn, arglist);
6444 return fold (build (MULT_EXPR, type, arg0, arg1));
6445 }
6446
6447 /* Optimize x/pow(y,z) into x*pow(y,-z). */
6448 if (fcode == BUILT_IN_POW
6449 || fcode == BUILT_IN_POWF
6450 || fcode == BUILT_IN_POWL)
6451 {
6452 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6453 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6454 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
6455 tree neg11 = fold (build1 (NEGATE_EXPR, type, arg11));
6456 tree arglist = tree_cons (NULL_TREE, arg10,
6457 build_tree_list (NULL_TREE, neg11));
6458 arg1 = build_function_call_expr (powfn, arglist);
6459 return fold (build (MULT_EXPR, type, arg0, arg1));
6460 }
6461 }
6462
6463 if (flag_unsafe_math_optimizations)
6464 {
6465 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6466 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6467
6468 /* Optimize sin(x)/cos(x) as tan(x). */
6469 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
6470 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
6471 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
6472 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6473 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6474 {
6475 tree tanfn;
6476
6477 if (fcode0 == BUILT_IN_SIN)
6478 tanfn = implicit_built_in_decls[BUILT_IN_TAN];
6479 else if (fcode0 == BUILT_IN_SINF)
6480 tanfn = implicit_built_in_decls[BUILT_IN_TANF];
6481 else if (fcode0 == BUILT_IN_SINL)
6482 tanfn = implicit_built_in_decls[BUILT_IN_TANL];
6483 else
6484 tanfn = NULL_TREE;
6485
6486 if (tanfn != NULL_TREE)
6487 return build_function_call_expr (tanfn,
6488 TREE_OPERAND (arg0, 1));
6489 }
6490
6491 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
6492 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
6493 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
6494 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
6495 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6496 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6497 {
6498 tree tanfn;
6499
6500 if (fcode0 == BUILT_IN_COS)
6501 tanfn = implicit_built_in_decls[BUILT_IN_TAN];
6502 else if (fcode0 == BUILT_IN_COSF)
6503 tanfn = implicit_built_in_decls[BUILT_IN_TANF];
6504 else if (fcode0 == BUILT_IN_COSL)
6505 tanfn = implicit_built_in_decls[BUILT_IN_TANL];
6506 else
6507 tanfn = NULL_TREE;
6508
6509 if (tanfn != NULL_TREE)
6510 {
6511 tree tmp = TREE_OPERAND (arg0, 1);
6512 tmp = build_function_call_expr (tanfn, tmp);
6513 return fold (build (RDIV_EXPR, type,
6514 build_real (type, dconst1),
6515 tmp));
6516 }
6517 }
6518
6519 /* Optimize pow(x,c)/x as pow(x,c-1). */
6520 if (fcode0 == BUILT_IN_POW
6521 || fcode0 == BUILT_IN_POWF
6522 || fcode0 == BUILT_IN_POWL)
6523 {
6524 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6525 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
6526 if (TREE_CODE (arg01) == REAL_CST
6527 && ! TREE_CONSTANT_OVERFLOW (arg01)
6528 && operand_equal_p (arg1, arg00, 0))
6529 {
6530 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6531 REAL_VALUE_TYPE c;
6532 tree arg, arglist;
6533
6534 c = TREE_REAL_CST (arg01);
6535 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
6536 arg = build_real (type, c);
6537 arglist = build_tree_list (NULL_TREE, arg);
6538 arglist = tree_cons (NULL_TREE, arg1, arglist);
6539 return build_function_call_expr (powfn, arglist);
6540 }
6541 }
6542 }
6543 goto binary;
6544
6545 case TRUNC_DIV_EXPR:
6546 case ROUND_DIV_EXPR:
6547 case FLOOR_DIV_EXPR:
6548 case CEIL_DIV_EXPR:
6549 case EXACT_DIV_EXPR:
6550 if (integer_onep (arg1))
6551 return non_lvalue (convert (type, arg0));
6552 if (integer_zerop (arg1))
6553 return t;
6554
6555 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
6556 operation, EXACT_DIV_EXPR.
6557
6558 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
6559 At one time others generated faster code; it's not clear if they do
6560 after the last round of changes to the DIV code in expmed.c. */
6561 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
6562 && multiple_of_p (type, arg0, arg1))
6563 return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));
6564
6565 if (TREE_CODE (arg1) == INTEGER_CST
6566 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6567 code, NULL_TREE)))
6568 return convert (type, tem);
6569
6570 goto binary;
6571
6572 case CEIL_MOD_EXPR:
6573 case FLOOR_MOD_EXPR:
6574 case ROUND_MOD_EXPR:
6575 case TRUNC_MOD_EXPR:
6576 if (integer_onep (arg1))
6577 return omit_one_operand (type, integer_zero_node, arg0);
6578 if (integer_zerop (arg1))
6579 return t;
6580
6581 if (TREE_CODE (arg1) == INTEGER_CST
6582 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6583 code, NULL_TREE)))
6584 return convert (type, tem);
6585
6586 goto binary;
6587
6588 case LROTATE_EXPR:
6589 case RROTATE_EXPR:
6590 if (integer_all_onesp (arg0))
6591 return omit_one_operand (type, arg0, arg1);
6592 goto shift;
6593
6594 case RSHIFT_EXPR:
6595 /* Optimize -1 >> x for arithmetic right shifts. */
6596 if (integer_all_onesp (arg0) && ! TREE_UNSIGNED (type))
6597 return omit_one_operand (type, arg0, arg1);
6598 /* ... fall through ... */
6599
6600 case LSHIFT_EXPR:
6601 shift:
6602 if (integer_zerop (arg1))
6603 return non_lvalue (convert (type, arg0));
6604 if (integer_zerop (arg0))
6605 return omit_one_operand (type, arg0, arg1);
6606
6607 /* Since a negative shift count is not well-defined,
6608 don't try to compute it in the compiler. */
6609 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
6610 return t;
6611 /* Rewrite an LROTATE_EXPR by a constant into an
6612 RROTATE_EXPR by a new constant. */
6613 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
6614 {
6615 if (t == orig_t)
6616 t = copy_node (t);
6617 TREE_SET_CODE (t, RROTATE_EXPR);
6618 code = RROTATE_EXPR;
6619 TREE_OPERAND (t, 1) = arg1
6620 = const_binop
6621 (MINUS_EXPR,
6622 convert (TREE_TYPE (arg1),
6623 build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0)),
6624 arg1, 0);
6625 if (tree_int_cst_sgn (arg1) < 0)
6626 return t;
6627 }
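/* Editor's sketch (hypothetical example, 32-bit type): a rotate left
   by 3 is rewritten as a rotate right by 32 - 3 = 29.  */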
6628
6629 /* If we have a rotate of a bit operation with the rotate count and
6630 the second operand of the bit operation both constant,
6631 permute the two operations. */
6632 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6633 && (TREE_CODE (arg0) == BIT_AND_EXPR
6634 || TREE_CODE (arg0) == BIT_IOR_EXPR
6635 || TREE_CODE (arg0) == BIT_XOR_EXPR)
6636 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
6637 return fold (build (TREE_CODE (arg0), type,
6638 fold (build (code, type,
6639 TREE_OPERAND (arg0, 0), arg1)),
6640 fold (build (code, type,
6641 TREE_OPERAND (arg0, 1), arg1))));
6642
6643 /* Two consecutive rotates adding up to the width of the mode can
6644 be ignored. */
6645 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6646 && TREE_CODE (arg0) == RROTATE_EXPR
6647 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6648 && TREE_INT_CST_HIGH (arg1) == 0
6649 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
6650 && ((TREE_INT_CST_LOW (arg1)
6651 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
6652 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
6653 return TREE_OPERAND (arg0, 0);
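/* Editor's sketch (hypothetical example, 32-bit type): rotating right
   by 10 and then by 22 is the identity, so only the operand remains.  */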
6654
6655 goto binary;
6656
6657 case MIN_EXPR:
6658 if (operand_equal_p (arg0, arg1, 0))
6659 return omit_one_operand (type, arg0, arg1);
6660 if (INTEGRAL_TYPE_P (type)
6661 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
6662 return omit_one_operand (type, arg1, arg0);
6663 goto associate;
6664
6665 case MAX_EXPR:
6666 if (operand_equal_p (arg0, arg1, 0))
6667 return omit_one_operand (type, arg0, arg1);
6668 if (INTEGRAL_TYPE_P (type)
6669 && TYPE_MAX_VALUE (type)
6670 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
6671 return omit_one_operand (type, arg1, arg0);
6672 goto associate;
6673
6674 case TRUTH_NOT_EXPR:
6675 /* Note that the operand of this must be an int
6676 and its values must be 0 or 1.
6677 ("true" is a fixed value perhaps depending on the language,
6678 but we don't handle values other than 1 correctly yet.) */
6679 tem = invert_truthvalue (arg0);
6680 /* Avoid infinite recursion. */
6681 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
6682 {
6683 tem = fold_single_bit_test (code, arg0, arg1, type);
6684 if (tem)
6685 return tem;
6686 return t;
6687 }
6688 return convert (type, tem);
6689
6690 case TRUTH_ANDIF_EXPR:
6691 /* Note that the operands of this must be ints
6692 and their values must be 0 or 1.
6693 ("true" is a fixed value perhaps depending on the language.) */
6694 /* If first arg is constant zero, return it. */
6695 if (integer_zerop (arg0))
6696 return convert (type, arg0);
6697 case TRUTH_AND_EXPR:
6698 /* If either arg is constant true, drop it. */
6699 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6700 return non_lvalue (convert (type, arg1));
6701 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
6702 /* Preserve sequence points. */
6703 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
6704 return non_lvalue (convert (type, arg0));
6705 /* If second arg is constant zero, result is zero, but first arg
6706 must be evaluated. */
6707 if (integer_zerop (arg1))
6708 return omit_one_operand (type, arg1, arg0);
6709 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
6710 case will be handled here. */
6711 if (integer_zerop (arg0))
6712 return omit_one_operand (type, arg0, arg1);
6713
6714 truth_andor:
6715 /* We only do these simplifications if we are optimizing. */
6716 if (!optimize)
6717 return t;
6718
6719 /* Check for things like (A || B) && (A || C). We can convert this
6720 to A || (B && C). Note that either operator can be any of the four
6721 truth and/or operations and the transformation will still be
6722 valid. Also note that we only care about order for the
6723 ANDIF and ORIF operators. If B contains side effects, this
6724 might change the truth-value of A. */
6725 if (TREE_CODE (arg0) == TREE_CODE (arg1)
6726 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
6727 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
6728 || TREE_CODE (arg0) == TRUTH_AND_EXPR
6729 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
6730 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
6731 {
6732 tree a00 = TREE_OPERAND (arg0, 0);
6733 tree a01 = TREE_OPERAND (arg0, 1);
6734 tree a10 = TREE_OPERAND (arg1, 0);
6735 tree a11 = TREE_OPERAND (arg1, 1);
6736 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
6737 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
6738 && (code == TRUTH_AND_EXPR
6739 || code == TRUTH_OR_EXPR));
6740
6741 if (operand_equal_p (a00, a10, 0))
6742 return fold (build (TREE_CODE (arg0), type, a00,
6743 fold (build (code, type, a01, a11))));
6744 else if (commutative && operand_equal_p (a00, a11, 0))
6745 return fold (build (TREE_CODE (arg0), type, a00,
6746 fold (build (code, type, a01, a10))));
6747 else if (commutative && operand_equal_p (a01, a10, 0))
6748 return fold (build (TREE_CODE (arg0), type, a01,
6749 fold (build (code, type, a00, a11))));
6750
6751           /* This case is tricky because we must either have commutative
6752 operators or else A10 must not have side-effects. */
6753
6754 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
6755 && operand_equal_p (a01, a11, 0))
6756 return fold (build (TREE_CODE (arg0), type,
6757 fold (build (code, type, a00, a10)),
6758 a01));
6759 }
6760
6761 /* See if we can build a range comparison. */
6762 if (0 != (tem = fold_range_test (t)))
6763 return tem;
6764
6765 /* Check for the possibility of merging component references. If our
6766 lhs is another similar operation, try to merge its rhs with our
6767 rhs. Then try to merge our lhs and rhs. */
6768 if (TREE_CODE (arg0) == code
6769 && 0 != (tem = fold_truthop (code, type,
6770 TREE_OPERAND (arg0, 1), arg1)))
6771 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6772
6773 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
6774 return tem;
6775
6776 return t;
6777
6778 case TRUTH_ORIF_EXPR:
6779 /* Note that the operands of this must be ints
6780 and their values must be 0 or true.
6781 ("true" is a fixed value perhaps depending on the language.) */
6782 /* If first arg is constant true, return it. */
6783 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6784 return convert (type, arg0);
6785 case TRUTH_OR_EXPR:
6786 /* If either arg is constant zero, drop it. */
6787 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
6788 return non_lvalue (convert (type, arg1));
6789 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
6790 /* Preserve sequence points. */
6791 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
6792 return non_lvalue (convert (type, arg0));
6793 /* If second arg is constant true, result is true, but we must
6794 evaluate first arg. */
6795 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
6796 return omit_one_operand (type, arg1, arg0);
6797 /* Likewise for first arg, but note this only occurs here for
6798 TRUTH_OR_EXPR. */
6799 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6800 return omit_one_operand (type, arg0, arg1);
6801 goto truth_andor;
6802
6803 case TRUTH_XOR_EXPR:
6804 /* If either arg is constant zero, drop it. */
6805 if (integer_zerop (arg0))
6806 return non_lvalue (convert (type, arg1));
6807 if (integer_zerop (arg1))
6808 return non_lvalue (convert (type, arg0));
6809 /* If either arg is constant true, this is a logical inversion. */
6810 if (integer_onep (arg0))
6811 return non_lvalue (convert (type, invert_truthvalue (arg1)));
6812 if (integer_onep (arg1))
6813 return non_lvalue (convert (type, invert_truthvalue (arg0)));
6814 return t;
6815
6816 case EQ_EXPR:
6817 case NE_EXPR:
6818 case LT_EXPR:
6819 case GT_EXPR:
6820 case LE_EXPR:
6821 case GE_EXPR:
6822 /* If one arg is a real or integer constant, put it last. */
6823 if ((TREE_CODE (arg0) == INTEGER_CST
6824 && TREE_CODE (arg1) != INTEGER_CST)
6825 || (TREE_CODE (arg0) == REAL_CST
6826               && TREE_CODE (arg1) != REAL_CST))
6827 {
6828 if (t == orig_t)
6829 t = copy_node (t);
6830 TREE_OPERAND (t, 0) = arg1;
6831 TREE_OPERAND (t, 1) = arg0;
6832 arg0 = TREE_OPERAND (t, 0);
6833 arg1 = TREE_OPERAND (t, 1);
6834 code = swap_tree_comparison (code);
6835 TREE_SET_CODE (t, code);
6836 }
6837
6838 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
6839 {
6840 tree targ0 = strip_float_extensions (arg0);
6841 tree targ1 = strip_float_extensions (arg1);
6842 tree newtype = TREE_TYPE (targ0);
6843
6844 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
6845 newtype = TREE_TYPE (targ1);
6846
6847 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
6848 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
6849 return fold (build (code, type, convert (newtype, targ0),
6850 convert (newtype, targ1)));
6851
6852 /* (-a) CMP (-b) -> b CMP a */
6853 if (TREE_CODE (arg0) == NEGATE_EXPR
6854 && TREE_CODE (arg1) == NEGATE_EXPR)
6855 return fold (build (code, type, TREE_OPERAND (arg1, 0),
6856 TREE_OPERAND (arg0, 0)));
6857
6858 if (TREE_CODE (arg1) == REAL_CST)
6859 {
6860 REAL_VALUE_TYPE cst;
6861 cst = TREE_REAL_CST (arg1);
6862
6863 /* (-a) CMP CST -> a swap(CMP) (-CST) */
6864 if (TREE_CODE (arg0) == NEGATE_EXPR)
6865 return
6866 fold (build (swap_tree_comparison (code), type,
6867 TREE_OPERAND (arg0, 0),
6868 build_real (TREE_TYPE (arg1),
6869 REAL_VALUE_NEGATE (cst))));
6870
6871 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
6872 /* a CMP (-0) -> a CMP 0 */
6873 if (REAL_VALUE_MINUS_ZERO (cst))
6874 return fold (build (code, type, arg0,
6875 build_real (TREE_TYPE (arg1), dconst0)));
6876
6877 /* x != NaN is always true, other ops are always false. */
6878 if (REAL_VALUE_ISNAN (cst)
6879 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
6880 {
6881 t = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
6882 return omit_one_operand (type, convert (type, t), arg0);
6883 }
6884
6885 /* Fold comparisons against infinity. */
6886 if (REAL_VALUE_ISINF (cst))
6887 {
6888 tem = fold_inf_compare (code, type, arg0, arg1);
6889 if (tem != NULL_TREE)
6890 return tem;
6891 }
6892 }
6893
6894 /* If this is a comparison of a real constant with a PLUS_EXPR
6895 or a MINUS_EXPR of a real constant, we can convert it into a
6896 comparison with a revised real constant as long as no overflow
6897 occurs when unsafe_math_optimizations are enabled. */
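                  /* E.g. under -funsafe-math-optimizations, x + 2.0 < 10.0 is
                     rewritten as x < 8.0 (illustrative; done only when the
                     revised constant does not overflow).  */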
6898 if (flag_unsafe_math_optimizations
6899 && TREE_CODE (arg1) == REAL_CST
6900 && (TREE_CODE (arg0) == PLUS_EXPR
6901 || TREE_CODE (arg0) == MINUS_EXPR)
6902 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6903 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
6904 ? MINUS_EXPR : PLUS_EXPR,
6905 arg1, TREE_OPERAND (arg0, 1), 0))
6906 && ! TREE_CONSTANT_OVERFLOW (tem))
6907 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6908
6909 /* Likewise, we can simplify a comparison of a real constant with
6910 a MINUS_EXPR whose first operand is also a real constant, i.e.
6911 (c1 - x) < c2 becomes x > c1-c2. */
6912 if (flag_unsafe_math_optimizations
6913 && TREE_CODE (arg1) == REAL_CST
6914 && TREE_CODE (arg0) == MINUS_EXPR
6915 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
6916 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
6917 arg1, 0))
6918 && ! TREE_CONSTANT_OVERFLOW (tem))
6919 return fold (build (swap_tree_comparison (code), type,
6920 TREE_OPERAND (arg0, 1), tem));
6921
6922 /* Fold comparisons against built-in math functions. */
6923 if (TREE_CODE (arg1) == REAL_CST
6924 && flag_unsafe_math_optimizations
6925 && ! flag_errno_math)
6926 {
6927 enum built_in_function fcode = builtin_mathfn_code (arg0);
6928
6929 if (fcode != END_BUILTINS)
6930 {
6931 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
6932 if (tem != NULL_TREE)
6933 return tem;
6934 }
6935 }
6936 }
6937
6938 /* Convert foo++ == CONST into ++foo == CONST + INCR.
6939 First, see if one arg is constant; find the constant arg
6940 and the other one. */
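                /* Illustration: foo++ == 5 becomes ++foo == 6.  For a pointer
                   FOO, INCR is presumably the byte offset of one element, and
                   the same rewrite applies with that offset.  */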
6941 {
6942 tree constop = 0, varop = NULL_TREE;
6943 int constopnum = -1;
6944
6945 if (TREE_CONSTANT (arg1))
6946 constopnum = 1, constop = arg1, varop = arg0;
6947 if (TREE_CONSTANT (arg0))
6948 constopnum = 0, constop = arg0, varop = arg1;
6949
6950 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
6951 {
6952 /* This optimization is invalid for ordered comparisons
6953 if CONST+INCR overflows or if foo+incr might overflow.
6954 This optimization is invalid for floating point due to rounding.
6955 For pointer types we assume overflow doesn't happen. */
6956 if (POINTER_TYPE_P (TREE_TYPE (varop))
6957 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
6958 && (code == EQ_EXPR || code == NE_EXPR)))
6959 {
6960 tree newconst
6961 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
6962 constop, TREE_OPERAND (varop, 1)));
6963
6964                   /* Do not overwrite the current varop to be a preincrement;
6965 create a new node so that we won't confuse our caller who
6966 might create trees and throw them away, reusing the
6967 arguments that they passed to build. This shows up in
6968 the THEN or ELSE parts of ?: being postincrements. */
6969 varop = build (PREINCREMENT_EXPR, TREE_TYPE (varop),
6970 TREE_OPERAND (varop, 0),
6971 TREE_OPERAND (varop, 1));
6972
6973 /* If VAROP is a reference to a bitfield, we must mask
6974 the constant by the width of the field. */
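                          /* Illustration: for an 8-bit field the mask computed
                             below works out to 0xff, so the revised constant is
                             in effect reduced to the field's width before the
                             comparison.  */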
6975 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
6976 && DECL_BIT_FIELD(TREE_OPERAND
6977 (TREE_OPERAND (varop, 0), 1)))
6978 {
6979 int size
6980 = TREE_INT_CST_LOW (DECL_SIZE
6981 (TREE_OPERAND
6982 (TREE_OPERAND (varop, 0), 1)));
6983 tree mask, unsigned_type;
6984 unsigned int precision;
6985 tree folded_compare;
6986
6987 /* First check whether the comparison would come out
6988 always the same. If we don't do that we would
6989 change the meaning with the masking. */
6990 if (constopnum == 0)
6991 folded_compare = fold (build (code, type, constop,
6992 TREE_OPERAND (varop, 0)));
6993 else
6994 folded_compare = fold (build (code, type,
6995 TREE_OPERAND (varop, 0),
6996 constop));
6997 if (integer_zerop (folded_compare)
6998 || integer_onep (folded_compare))
6999 return omit_one_operand (type, folded_compare, varop);
7000
7001 unsigned_type = (*lang_hooks.types.type_for_size)(size, 1);
7002 precision = TYPE_PRECISION (unsigned_type);
7003 mask = build_int_2 (~0, ~0);
7004 TREE_TYPE (mask) = unsigned_type;
7005 force_fit_type (mask, 0);
7006 mask = const_binop (RSHIFT_EXPR, mask,
7007 size_int (precision - size), 0);
7008 newconst = fold (build (BIT_AND_EXPR,
7009 TREE_TYPE (varop), newconst,
7010 convert (TREE_TYPE (varop),
7011 mask)));
7012 }
7013
7014 t = build (code, type,
7015 (constopnum == 0) ? newconst : varop,
7016 (constopnum == 1) ? newconst : varop);
7017 return t;
7018 }
7019 }
7020 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
7021 {
7022 if (POINTER_TYPE_P (TREE_TYPE (varop))
7023 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
7024 && (code == EQ_EXPR || code == NE_EXPR)))
7025 {
7026 tree newconst
7027 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
7028 constop, TREE_OPERAND (varop, 1)));
7029
7030                   /* Do not overwrite the current varop to be a predecrement;
7031 create a new node so that we won't confuse our caller who
7032 might create trees and throw them away, reusing the
7033 arguments that they passed to build. This shows up in
7034 the THEN or ELSE parts of ?: being postdecrements. */
7035 varop = build (PREDECREMENT_EXPR, TREE_TYPE (varop),
7036 TREE_OPERAND (varop, 0),
7037 TREE_OPERAND (varop, 1));
7038
7039 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7040 && DECL_BIT_FIELD(TREE_OPERAND
7041 (TREE_OPERAND (varop, 0), 1)))
7042 {
7043 int size
7044 = TREE_INT_CST_LOW (DECL_SIZE
7045 (TREE_OPERAND
7046 (TREE_OPERAND (varop, 0), 1)));
7047 tree mask, unsigned_type;
7048 unsigned int precision;
7049 tree folded_compare;
7050
7051 if (constopnum == 0)
7052 folded_compare = fold (build (code, type, constop,
7053 TREE_OPERAND (varop, 0)));
7054 else
7055 folded_compare = fold (build (code, type,
7056 TREE_OPERAND (varop, 0),
7057 constop));
7058 if (integer_zerop (folded_compare)
7059 || integer_onep (folded_compare))
7060 return omit_one_operand (type, folded_compare, varop);
7061
7062 unsigned_type = (*lang_hooks.types.type_for_size)(size, 1);
7063 precision = TYPE_PRECISION (unsigned_type);
7064 mask = build_int_2 (~0, ~0);
7065                       TREE_TYPE (mask) = unsigned_type;
7066 force_fit_type (mask, 0);
7067 mask = const_binop (RSHIFT_EXPR, mask,
7068 size_int (precision - size), 0);
7069 newconst = fold (build (BIT_AND_EXPR,
7070 TREE_TYPE (varop), newconst,
7071 convert (TREE_TYPE (varop),
7072 mask)));
7073 }
7074
7075 t = build (code, type,
7076 (constopnum == 0) ? newconst : varop,
7077 (constopnum == 1) ? newconst : varop);
7078 return t;
7079 }
7080 }
7081 }
7082
7083 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
7084 This transformation affects the cases which are handled in later
7085 optimizations involving comparisons with non-negative constants. */
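              /* E.g. X >= 3 becomes X > 2, and X < 3 becomes X <= 2.  */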
7086 if (TREE_CODE (arg1) == INTEGER_CST
7087 && TREE_CODE (arg0) != INTEGER_CST
7088 && tree_int_cst_sgn (arg1) > 0)
7089 {
7090 switch (code)
7091 {
7092 case GE_EXPR:
7093 code = GT_EXPR;
7094 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7095 t = build (code, type, TREE_OPERAND (t, 0), arg1);
7096 break;
7097
7098 case LT_EXPR:
7099 code = LE_EXPR;
7100 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7101 t = build (code, type, TREE_OPERAND (t, 0), arg1);
7102 break;
7103
7104 default:
7105 break;
7106 }
7107 }
7108
7109 /* Comparisons with the highest or lowest possible integer of
7110 the specified size will have known values. */
7111 {
7112 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
7113
7114 if (TREE_CODE (arg1) == INTEGER_CST
7115 && ! TREE_CONSTANT_OVERFLOW (arg1)
7116 && width <= HOST_BITS_PER_WIDE_INT
7117 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
7118 || POINTER_TYPE_P (TREE_TYPE (arg1))))
7119 {
7120 unsigned HOST_WIDE_INT signed_max;
7121 unsigned HOST_WIDE_INT max, min;
7122
7123 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
7124
7125 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7126 {
7127 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
7128 min = 0;
7129 }
7130 else
7131 {
7132 max = signed_max;
7133 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
7134 }
7135
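                  /* Illustration for width == 32: signed_max is 0x7fffffff; an
                     unsigned type gets max == 0xffffffff and min == 0, a signed
                     one max == 0x7fffffff and min == 0x80000000.  So, e.g., for
                     unsigned char X (width 8): X > 255 folds to 0, X <= 255
                     folds to 1, X >= 255 becomes X == 255 and X < 255 becomes
                     X != 255 below.  */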
7136 if (TREE_INT_CST_HIGH (arg1) == 0
7137 && TREE_INT_CST_LOW (arg1) == max)
7138 switch (code)
7139 {
7140 case GT_EXPR:
7141 return omit_one_operand (type,
7142 convert (type, integer_zero_node),
7143 arg0);
7144 case GE_EXPR:
7145 code = EQ_EXPR;
7146 if (t == orig_t)
7147 t = copy_node (t);
7148 TREE_SET_CODE (t, EQ_EXPR);
7149 break;
7150 case LE_EXPR:
7151 return omit_one_operand (type,
7152 convert (type, integer_one_node),
7153 arg0);
7154 case LT_EXPR:
7155 code = NE_EXPR;
7156 if (t == orig_t)
7157 t = copy_node (t);
7158 TREE_SET_CODE (t, NE_EXPR);
7159 break;
7160
7161 /* The GE_EXPR and LT_EXPR cases above are not normally
7162 reached because of previous transformations. */
7163
7164 default:
7165 break;
7166 }
7167 else if (TREE_INT_CST_HIGH (arg1) == 0
7168 && TREE_INT_CST_LOW (arg1) == max - 1)
7169 switch (code)
7170 {
7171 case GT_EXPR:
7172 code = EQ_EXPR;
7173 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7174 t = build (code, type, TREE_OPERAND (t, 0), arg1);
7175 break;
7176 case LE_EXPR:
7177 code = NE_EXPR;
7178 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7179 t = build (code, type, TREE_OPERAND (t, 0), arg1);
7180 break;
7181 default:
7182 break;
7183 }
7184 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7185 && TREE_INT_CST_LOW (arg1) == min)
7186 switch (code)
7187 {
7188 case LT_EXPR:
7189 return omit_one_operand (type,
7190 convert (type, integer_zero_node),
7191 arg0);
7192 case LE_EXPR:
7193 code = EQ_EXPR;
7194 if (t == orig_t)
7195 t = copy_node (t);
7196 TREE_SET_CODE (t, EQ_EXPR);
7197 break;
7198
7199 case GE_EXPR:
7200 return omit_one_operand (type,
7201 convert (type, integer_one_node),
7202 arg0);
7203 case GT_EXPR:
7204 code = NE_EXPR;
7205 if (t == orig_t)
7206 t = copy_node (t);
7207 TREE_SET_CODE (t, NE_EXPR);
7208 break;
7209
7210 default:
7211 break;
7212 }
7213 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7214 && TREE_INT_CST_LOW (arg1) == min + 1)
7215 switch (code)
7216 {
7217 case GE_EXPR:
7218 code = NE_EXPR;
7219 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7220 t = build (code, type, TREE_OPERAND (t, 0), arg1);
7221 break;
7222 case LT_EXPR:
7223 code = EQ_EXPR;
7224 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7225 t = build (code, type, TREE_OPERAND (t, 0), arg1);
7226 break;
7227 default:
7228 break;
7229 }
7230
7231 else if (TREE_INT_CST_HIGH (arg1) == 0
7232 && TREE_INT_CST_LOW (arg1) == signed_max
7233 && TREE_UNSIGNED (TREE_TYPE (arg1))
7234 /* signed_type does not work on pointer types. */
7235 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
7236 {
7237 /* The following case also applies to X < signed_max+1
7238                    and X >= signed_max+1 because of previous transformations.  */
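                      /* E.g. (unsigned int) x <= 0x7fffffff becomes
                         (int) x >= 0, and (unsigned int) x > 0x7fffffff becomes
                         (int) x < 0 (illustrative, for width == 32).  */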
7239 if (code == LE_EXPR || code == GT_EXPR)
7240 {
7241 tree st0, st1;
7242 st0 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg0));
7243 st1 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg1));
7244 return fold
7245 (build (code == LE_EXPR ? GE_EXPR: LT_EXPR,
7246 type, convert (st0, arg0),
7247 convert (st1, integer_zero_node)));
7248 }
7249 }
7250 }
7251 }
7252
7253 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
7254 a MINUS_EXPR of a constant, we can convert it into a comparison with
7255 a revised constant as long as no overflow occurs. */
7256 if ((code == EQ_EXPR || code == NE_EXPR)
7257 && TREE_CODE (arg1) == INTEGER_CST
7258 && (TREE_CODE (arg0) == PLUS_EXPR
7259 || TREE_CODE (arg0) == MINUS_EXPR)
7260 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7261 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7262 ? MINUS_EXPR : PLUS_EXPR,
7263 arg1, TREE_OPERAND (arg0, 1), 0))
7264 && ! TREE_CONSTANT_OVERFLOW (tem))
7265 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7266
7267 /* Similarly for a NEGATE_EXPR. */
7268 else if ((code == EQ_EXPR || code == NE_EXPR)
7269 && TREE_CODE (arg0) == NEGATE_EXPR
7270 && TREE_CODE (arg1) == INTEGER_CST
7271 && 0 != (tem = negate_expr (arg1))
7272 && TREE_CODE (tem) == INTEGER_CST
7273 && ! TREE_CONSTANT_OVERFLOW (tem))
7274 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7275
7276 /* If we have X - Y == 0, we can convert that to X == Y and similarly
7277 for !=. Don't do this for ordered comparisons due to overflow. */
7278 else if ((code == NE_EXPR || code == EQ_EXPR)
7279 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
7280 return fold (build (code, type,
7281 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
7282
7283 /* If we are widening one operand of an integer comparison,
7284 see if the other operand is similarly being widened. Perhaps we
7285 can do the comparison in the narrower type. */
7286 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
7287 && TREE_CODE (arg0) == NOP_EXPR
7288 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
7289 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
7290 && (TREE_TYPE (t1) == TREE_TYPE (tem)
7291 || (TREE_CODE (t1) == INTEGER_CST
7292 && int_fits_type_p (t1, TREE_TYPE (tem)))))
7293 return fold (build (code, type, tem, convert (TREE_TYPE (tem), t1)));
7294
7295 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
7296 constant, we can simplify it. */
7297 else if (TREE_CODE (arg1) == INTEGER_CST
7298 && (TREE_CODE (arg0) == MIN_EXPR
7299 || TREE_CODE (arg0) == MAX_EXPR)
7300 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7301 return optimize_minmax_comparison (t);
7302
7303 /* If we are comparing an ABS_EXPR with a constant, we can
7304 convert all the cases into explicit comparisons, but they may
7305 well not be faster than doing the ABS and one comparison.
7306 But ABS (X) <= C is a range comparison, which becomes a subtraction
7307 and a comparison, and is probably faster. */
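              /* E.g. ABS (x) <= 5 folds to x >= -5 && x <= 5, a
                 TRUTH_ANDIF_EXPR of the two comparisons built below.  */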
7308 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7309 && TREE_CODE (arg0) == ABS_EXPR
7310 && ! TREE_SIDE_EFFECTS (arg0)
7311 && (0 != (tem = negate_expr (arg1)))
7312 && TREE_CODE (tem) == INTEGER_CST
7313 && ! TREE_CONSTANT_OVERFLOW (tem))
7314 return fold (build (TRUTH_ANDIF_EXPR, type,
7315 build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
7316 build (LE_EXPR, type,
7317 TREE_OPERAND (arg0, 0), arg1)));
7318
7319 /* If this is an EQ or NE comparison with zero and ARG0 is
7320 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
7321 two operations, but the latter can be done in one less insn
7322 on machines that have only two-operand insns or on which a
7323 constant cannot be the first operand. */
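              /* E.g. ((1 << n) & x) == 0 becomes ((x >> n) & 1) == 0, and
                 likewise for !=.  */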
7324 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
7325 && TREE_CODE (arg0) == BIT_AND_EXPR)
7326 {
7327 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
7328 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
7329 return
7330 fold (build (code, type,
7331 build (BIT_AND_EXPR, TREE_TYPE (arg0),
7332 build (RSHIFT_EXPR,
7333 TREE_TYPE (TREE_OPERAND (arg0, 0)),
7334 TREE_OPERAND (arg0, 1),
7335 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
7336 convert (TREE_TYPE (arg0),
7337 integer_one_node)),
7338 arg1));
7339 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
7340 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
7341 return
7342 fold (build (code, type,
7343 build (BIT_AND_EXPR, TREE_TYPE (arg0),
7344 build (RSHIFT_EXPR,
7345 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7346 TREE_OPERAND (arg0, 0),
7347 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
7348 convert (TREE_TYPE (arg0),
7349 integer_one_node)),
7350 arg1));
7351 }
7352
7353 /* If this is an NE or EQ comparison of zero against the result of a
7354 signed MOD operation whose second operand is a power of 2, make
7355 the MOD operation unsigned since it is simpler and equivalent. */
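              /* E.g. with signed int x, x % 4 == 0 becomes
                 (unsigned int) x % 4U == 0 (illustrative).  */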
7356 if ((code == NE_EXPR || code == EQ_EXPR)
7357 && integer_zerop (arg1)
7358 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
7359 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
7360 || TREE_CODE (arg0) == CEIL_MOD_EXPR
7361 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
7362 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
7363 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7364 {
7365 tree newtype = (*lang_hooks.types.unsigned_type) (TREE_TYPE (arg0));
7366 tree newmod = build (TREE_CODE (arg0), newtype,
7367 convert (newtype, TREE_OPERAND (arg0, 0)),
7368 convert (newtype, TREE_OPERAND (arg0, 1)));
7369
7370 return build (code, type, newmod, convert (newtype, arg1));
7371 }
7372
7373 /* If this is an NE comparison of zero with an AND of one, remove the
7374 comparison since the AND will give the correct value. */
7375 if (code == NE_EXPR && integer_zerop (arg1)
7376 && TREE_CODE (arg0) == BIT_AND_EXPR
7377 && integer_onep (TREE_OPERAND (arg0, 1)))
7378 return convert (type, arg0);
7379
7380 /* If we have (A & C) == C where C is a power of 2, convert this into
7381 (A & C) != 0. Similarly for NE_EXPR. */
7382 if ((code == EQ_EXPR || code == NE_EXPR)
7383 && TREE_CODE (arg0) == BIT_AND_EXPR
7384 && integer_pow2p (TREE_OPERAND (arg0, 1))
7385 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
7386 return fold (build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
7387 arg0, integer_zero_node));
7388
7389 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
7390 2, then fold the expression into shifts and logical operations. */
7391 tem = fold_single_bit_test (code, arg0, arg1, type);
7392 if (tem)
7393 return tem;
7394
7395 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
7396 Similarly for NE_EXPR. */
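              /* E.g. (x & 3) == 4 folds to 0, since 4 & ~3 is nonzero.  */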
7397 if ((code == EQ_EXPR || code == NE_EXPR)
7398 && TREE_CODE (arg0) == BIT_AND_EXPR
7399 && TREE_CODE (arg1) == INTEGER_CST
7400 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7401 {
7402 tree dandnotc
7403 = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
7404 arg1, build1 (BIT_NOT_EXPR,
7405 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7406 TREE_OPERAND (arg0, 1))));
7407 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7408 if (integer_nonzerop (dandnotc))
7409 return omit_one_operand (type, rslt, arg0);
7410 }
7411
7412 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
7413 Similarly for NE_EXPR. */
7414 if ((code == EQ_EXPR || code == NE_EXPR)
7415 && TREE_CODE (arg0) == BIT_IOR_EXPR
7416 && TREE_CODE (arg1) == INTEGER_CST
7417 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7418 {
7419 tree candnotd
7420 = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
7421 TREE_OPERAND (arg0, 1),
7422 build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1)));
7423 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7424 if (integer_nonzerop (candnotd))
7425 return omit_one_operand (type, rslt, arg0);
7426 }
7427
7428 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
7429 and similarly for >= into !=. */
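              /* E.g. with unsigned x, x < (1 << n) becomes (x >> n) == 0 and
                 x >= (1 << n) becomes (x >> n) != 0.  */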
7430 if ((code == LT_EXPR || code == GE_EXPR)
7431 && TREE_UNSIGNED (TREE_TYPE (arg0))
7432 && TREE_CODE (arg1) == LSHIFT_EXPR
7433 && integer_onep (TREE_OPERAND (arg1, 0)))
7434 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7435 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7436 TREE_OPERAND (arg1, 1)),
7437 convert (TREE_TYPE (arg0), integer_zero_node));
7438
7439 else if ((code == LT_EXPR || code == GE_EXPR)
7440 && TREE_UNSIGNED (TREE_TYPE (arg0))
7441 && (TREE_CODE (arg1) == NOP_EXPR
7442 || TREE_CODE (arg1) == CONVERT_EXPR)
7443 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
7444 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
7445 return
7446 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7447 convert (TREE_TYPE (arg0),
7448 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7449 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
7450 convert (TREE_TYPE (arg0), integer_zero_node));
7451
7452 /* Simplify comparison of something with itself. (For IEEE
7453 floating-point, we can only do some of these simplifications.) */
7454 if (operand_equal_p (arg0, arg1, 0))
7455 {
7456 switch (code)
7457 {
7458 case EQ_EXPR:
7459 case GE_EXPR:
7460 case LE_EXPR:
7461 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7462 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7463 return constant_boolean_node (1, type);
7464 code = EQ_EXPR;
7465 if (t == orig_t)
7466 t = copy_node (t);
7467 TREE_SET_CODE (t, code);
7468 break;
7469
7470 case NE_EXPR:
7471 /* For NE, we can only do this simplification if integer
7472 or we don't honor IEEE floating point NaNs. */
7473 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
7474 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7475 break;
7476 /* ... fall through ... */
7477 case GT_EXPR:
7478 case LT_EXPR:
7479 return constant_boolean_node (0, type);
7480 default:
7481 abort ();
7482 }
7483 }
7484
7485 /* If we are comparing an expression that just has comparisons
7486 of two integer values, arithmetic expressions of those comparisons,
7487 and constants, we can simplify it. There are only three cases
7488 to check: the two values can either be equal, the first can be
7489 greater, or the second can be greater. Fold the expression for
7490 those three values. Since each value must be 0 or 1, we have
7491 eight possibilities, each of which corresponds to the constant 0
7492 or 1 or one of the six possible comparisons.
7493
7494 This handles common cases like (a > b) == 0 but also handles
7495 expressions like ((x > y) - (y > x)) > 0, which supposedly
7496 occur in macroized code. */
7497
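              /* Worked example: for ((x > y) - (y > x)) > 0 the three trial
                 results below are 1, 0 and 0, giving the mask 4 and hence the
                 simplification x > y.  */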
7498 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
7499 {
7500 tree cval1 = 0, cval2 = 0;
7501 int save_p = 0;
7502
7503 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
7504 /* Don't handle degenerate cases here; they should already
7505 have been handled anyway. */
7506 && cval1 != 0 && cval2 != 0
7507 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
7508 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
7509 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
7510 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
7511 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
7512 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
7513 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
7514 {
7515 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
7516 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
7517
7518 /* We can't just pass T to eval_subst in case cval1 or cval2
7519 was the same as ARG1. */
7520
7521 tree high_result
7522 = fold (build (code, type,
7523 eval_subst (arg0, cval1, maxval, cval2, minval),
7524 arg1));
7525 tree equal_result
7526 = fold (build (code, type,
7527 eval_subst (arg0, cval1, maxval, cval2, maxval),
7528 arg1));
7529 tree low_result
7530 = fold (build (code, type,
7531 eval_subst (arg0, cval1, minval, cval2, maxval),
7532 arg1));
7533
7534 /* All three of these results should be 0 or 1. Confirm they
7535 are. Then use those values to select the proper code
7536 to use. */
7537
7538 if ((integer_zerop (high_result)
7539 || integer_onep (high_result))
7540 && (integer_zerop (equal_result)
7541 || integer_onep (equal_result))
7542 && (integer_zerop (low_result)
7543 || integer_onep (low_result)))
7544 {
7545 /* Make a 3-bit mask with the high-order bit being the
7546 value for `>', the next for '=', and the low for '<'. */
7547 switch ((integer_onep (high_result) * 4)
7548 + (integer_onep (equal_result) * 2)
7549 + integer_onep (low_result))
7550 {
7551 case 0:
7552 /* Always false. */
7553 return omit_one_operand (type, integer_zero_node, arg0);
7554 case 1:
7555 code = LT_EXPR;
7556 break;
7557 case 2:
7558 code = EQ_EXPR;
7559 break;
7560 case 3:
7561 code = LE_EXPR;
7562 break;
7563 case 4:
7564 code = GT_EXPR;
7565 break;
7566 case 5:
7567 code = NE_EXPR;
7568 break;
7569 case 6:
7570 code = GE_EXPR;
7571 break;
7572 case 7:
7573 /* Always true. */
7574 return omit_one_operand (type, integer_one_node, arg0);
7575 }
7576
7577 t = build (code, type, cval1, cval2);
7578 if (save_p)
7579 return save_expr (t);
7580 else
7581 return fold (t);
7582 }
7583 }
7584 }
7585
7586 /* If this is a comparison of a field, we may be able to simplify it. */
7587 if (((TREE_CODE (arg0) == COMPONENT_REF
7588 && (*lang_hooks.can_use_bit_fields_p) ())
7589 || TREE_CODE (arg0) == BIT_FIELD_REF)
7590 && (code == EQ_EXPR || code == NE_EXPR)
7591 /* Handle the constant case even without -O
7592 to make sure the warnings are given. */
7593 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
7594 {
7595 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
7596 return t1 ? t1 : t;
7597 }
7598
7599 /* If this is a comparison of complex values and either or both sides
7600 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
7601 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
7602 This may prevent needless evaluations. */
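              /* E.g. x == y with complex operands splits into
                 REALPART (x) == REALPART (y) && IMAGPART (x) == IMAGPART (y)
                 via TRUTH_ANDIF_EXPR; != uses TRUTH_ORIF_EXPR instead.  */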
7603 if ((code == EQ_EXPR || code == NE_EXPR)
7604 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
7605 && (TREE_CODE (arg0) == COMPLEX_EXPR
7606 || TREE_CODE (arg1) == COMPLEX_EXPR
7607 || TREE_CODE (arg0) == COMPLEX_CST
7608 || TREE_CODE (arg1) == COMPLEX_CST))
7609 {
7610 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
7611 tree real0, imag0, real1, imag1;
7612
7613 arg0 = save_expr (arg0);
7614 arg1 = save_expr (arg1);
7615 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
7616 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
7617 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
7618 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
7619
7620 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
7621 : TRUTH_ORIF_EXPR),
7622 type,
7623 fold (build (code, type, real0, real1)),
7624 fold (build (code, type, imag0, imag1))));
7625 }
7626
7627 /* Optimize comparisons of strlen vs zero to a compare of the
7628 first character of the string vs zero. To wit,
7629 strlen(ptr) == 0 => *ptr == 0
7630 strlen(ptr) != 0 => *ptr != 0
7631 Other cases should reduce to one of these two (or a constant)
7632 due to the return value of strlen being unsigned. */
7633 if ((code == EQ_EXPR || code == NE_EXPR)
7634 && integer_zerop (arg1)
7635 && TREE_CODE (arg0) == CALL_EXPR)
7636 {
7637 tree fndecl = get_callee_fndecl (arg0);
7638 tree arglist;
7639
7640 if (fndecl
7641 && DECL_BUILT_IN (fndecl)
7642 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
7643 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
7644 && (arglist = TREE_OPERAND (arg0, 1))
7645 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
7646 && ! TREE_CHAIN (arglist))
7647 return fold (build (code, type,
7648 build1 (INDIRECT_REF, char_type_node,
7649 TREE_VALUE(arglist)),
7650 integer_zero_node));
7651 }
7652
7653 /* From here on, the only cases we handle are when the result is
7654 known to be a constant.
7655
7656 To compute GT, swap the arguments and do LT.
7657 To compute GE, do LT and invert the result.
7658 To compute LE, swap the arguments, do LT and invert the result.
7659 To compute NE, do EQ and invert the result.
7660
7661 Therefore, the code below must handle only EQ and LT. */
7662
7663 if (code == LE_EXPR || code == GT_EXPR)
7664 {
7665 tem = arg0, arg0 = arg1, arg1 = tem;
7666 code = swap_tree_comparison (code);
7667 }
7668
7669 /* Note that it is safe to invert for real values here because we
7670          will check below in the one case where it matters.  */
7671
7672 t1 = NULL_TREE;
7673 invert = 0;
7674 if (code == NE_EXPR || code == GE_EXPR)
7675 {
7676 invert = 1;
7677 code = invert_tree_comparison (code);
7678 }
7679
7680 /* Compute a result for LT or EQ if args permit;
7681 otherwise return T. */
7682 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
7683 {
7684 if (code == EQ_EXPR)
7685 t1 = build_int_2 (tree_int_cst_equal (arg0, arg1), 0);
7686 else
7687 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
7688 ? INT_CST_LT_UNSIGNED (arg0, arg1)
7689 : INT_CST_LT (arg0, arg1)),
7690 0);
7691 }
7692
7693 #if 0 /* This is no longer useful, but breaks some real code. */
7694 /* Assume a nonexplicit constant cannot equal an explicit one,
7695 since such code would be undefined anyway.
7696 Exception: on sysvr4, using #pragma weak,
7697 a label can come out as 0. */
7698 else if (TREE_CODE (arg1) == INTEGER_CST
7699 && !integer_zerop (arg1)
7700 && TREE_CONSTANT (arg0)
7701 && TREE_CODE (arg0) == ADDR_EXPR
7702 && code == EQ_EXPR)
7703 t1 = build_int_2 (0, 0);
7704 #endif
7705 /* Two real constants can be compared explicitly. */
7706 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
7707 {
7708 /* If either operand is a NaN, the result is false with two
7709 exceptions: First, an NE_EXPR is true on NaNs, but that case
7710 is already handled correctly since we will be inverting the
7711 result for NE_EXPR. Second, if we had inverted a LE_EXPR
7712 or a GE_EXPR into a LT_EXPR, we must return true so that it
7713 will be inverted into false. */
7714
7715 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
7716 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
7717 t1 = build_int_2 (invert && code == LT_EXPR, 0);
7718
7719 else if (code == EQ_EXPR)
7720 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
7721 TREE_REAL_CST (arg1)),
7722 0);
7723 else
7724 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
7725 TREE_REAL_CST (arg1)),
7726 0);
7727 }
7728
7729 if (t1 == NULL_TREE)
7730 return t;
7731
7732 if (invert)
7733 TREE_INT_CST_LOW (t1) ^= 1;
7734
7735 TREE_TYPE (t1) = type;
7736 if (TREE_CODE (type) == BOOLEAN_TYPE)
7737 return (*lang_hooks.truthvalue_conversion) (t1);
7738 return t1;
7739
7740 case COND_EXPR:
7741 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
7742 so all simple results must be passed through pedantic_non_lvalue. */
7743 if (TREE_CODE (arg0) == INTEGER_CST)
7744 return pedantic_non_lvalue
7745 (TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
7746 else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
7747 return pedantic_omit_one_operand (type, arg1, arg0);
7748
7749 /* If the second operand is zero, invert the comparison and swap
7750 the second and third operands. Likewise if the second operand
7751          is constant and the third is not, or if the third operand is
7752 equivalent to the first operand of the comparison. */
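              /* E.g. with integer operands, a < b ? 0 : c is rewritten as
                 a >= b ? c : 0.  */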
7753
7754 if (integer_zerop (arg1)
7755 || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
7756 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
7757 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
7758 TREE_OPERAND (t, 2),
7759 TREE_OPERAND (arg0, 1))))
7760 {
7761 /* See if this can be inverted. If it can't, possibly because
7762 it was a floating-point inequality comparison, don't do
7763 anything. */
7764 tem = invert_truthvalue (arg0);
7765
7766 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
7767 {
7768 t = build (code, type, tem,
7769 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
7770 arg0 = tem;
7771 /* arg1 should be the first argument of the new T. */
7772 arg1 = TREE_OPERAND (t, 1);
7773 STRIP_NOPS (arg1);
7774 }
7775 }
7776
7777 /* If we have A op B ? A : C, we may be able to convert this to a
7778 simpler expression, depending on the operation and the values
7779 of B and C. Signed zeros prevent all of these transformations,
7780 for reasons given above each one. */
7781
7782 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
7783 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
7784 arg1, TREE_OPERAND (arg0, 1))
7785 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
7786 {
7787 tree arg2 = TREE_OPERAND (t, 2);
7788 enum tree_code comp_code = TREE_CODE (arg0);
7789
7790 STRIP_NOPS (arg2);
7791
7792 /* If we have A op 0 ? A : -A, consider applying the following
7793 transformations:
7794
7795 A == 0? A : -A same as -A
7796 A != 0? A : -A same as A
7797 A >= 0? A : -A same as abs (A)
7798 A > 0? A : -A same as abs (A)
7799 A <= 0? A : -A same as -abs (A)
7800 A < 0? A : -A same as -abs (A)
7801
7802 None of these transformations work for modes with signed
7803 zeros. If A is +/-0, the first two transformations will
7804 change the sign of the result (from +0 to -0, or vice
7805 versa). The last four will fix the sign of the result,
7806 even though the original expressions could be positive or
7807 negative, depending on the sign of A.
7808
7809 Note that all these transformations are correct if A is
7810 NaN, since the two alternatives (A and -A) are also NaNs. */
7811 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
7812 ? real_zerop (TREE_OPERAND (arg0, 1))
7813 : integer_zerop (TREE_OPERAND (arg0, 1)))
7814 && TREE_CODE (arg2) == NEGATE_EXPR
7815 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
7816 switch (comp_code)
7817 {
7818 case EQ_EXPR:
7819 return
7820 pedantic_non_lvalue
7821 (convert (type,
7822 negate_expr
7823 (convert (TREE_TYPE (TREE_OPERAND (t, 1)),
7824 arg1))));
7825 case NE_EXPR:
7826 return pedantic_non_lvalue (convert (type, arg1));
7827 case GE_EXPR:
7828 case GT_EXPR:
7829 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7830 arg1 = convert ((*lang_hooks.types.signed_type)
7831 (TREE_TYPE (arg1)), arg1);
7832 return pedantic_non_lvalue
7833 (convert (type, fold (build1 (ABS_EXPR,
7834 TREE_TYPE (arg1), arg1))));
7835 case LE_EXPR:
7836 case LT_EXPR:
7837 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7838                  arg1 = convert ((*lang_hooks.types.signed_type)
7839 (TREE_TYPE (arg1)), arg1);
7840 return pedantic_non_lvalue
7841 (negate_expr (convert (type,
7842 fold (build1 (ABS_EXPR,
7843 TREE_TYPE (arg1),
7844 arg1)))));
7845 default:
7846 abort ();
7847 }
7848
7849 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
7850 A == 0 ? A : 0 is always 0 unless A is -0. Note that
7851 both transformations are correct when A is NaN: A != 0
7852 is then true, and A == 0 is false. */
7853
7854 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
7855 {
7856 if (comp_code == NE_EXPR)
7857 return pedantic_non_lvalue (convert (type, arg1));
7858 else if (comp_code == EQ_EXPR)
7859 return pedantic_non_lvalue (convert (type, integer_zero_node));
7860 }
7861
7862 /* Try some transformations of A op B ? A : B.
7863
7864 A == B? A : B same as B
7865 A != B? A : B same as A
7866 A >= B? A : B same as max (A, B)
7867 A > B? A : B same as max (B, A)
7868 A <= B? A : B same as min (A, B)
7869 A < B? A : B same as min (B, A)
7870
7871 As above, these transformations don't work in the presence
7872 of signed zeros. For example, if A and B are zeros of
7873 opposite sign, the first two transformations will change
7874 the sign of the result. In the last four, the original
7875 expressions give different results for (A=+0, B=-0) and
7876 (A=-0, B=+0), but the transformed expressions do not.
7877
7878 The first two transformations are correct if either A or B
7879 is a NaN. In the first transformation, the condition will
7880 be false, and B will indeed be chosen. In the case of the
7881 second transformation, the condition A != B will be true,
7882 and A will be chosen.
7883
7884 The conversions to max() and min() are not correct if B is
7885 a number and A is not. The conditions in the original
7886 expressions will be false, so all four give B. The min()
7887 and max() versions would give a NaN instead. */
7888 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
7889 arg2, TREE_OPERAND (arg0, 0)))
7890 {
7891 tree comp_op0 = TREE_OPERAND (arg0, 0);
7892 tree comp_op1 = TREE_OPERAND (arg0, 1);
7893 tree comp_type = TREE_TYPE (comp_op0);
7894
7895 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
7896 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
7897 {
7898 comp_type = type;
7899 comp_op0 = arg1;
7900 comp_op1 = arg2;
7901 }
7902
7903 switch (comp_code)
7904 {
7905 case EQ_EXPR:
7906 return pedantic_non_lvalue (convert (type, arg2));
7907 case NE_EXPR:
7908 return pedantic_non_lvalue (convert (type, arg1));
7909 case LE_EXPR:
7910 case LT_EXPR:
7911                /* In C++ a ?: expression can be an lvalue, so put first
7912                   the operand which will be used if they are equal, so
7913                   that we can convert this back to the
7914                   corresponding COND_EXPR.  */
7915 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
7916 return pedantic_non_lvalue
7917 (convert (type, fold (build (MIN_EXPR, comp_type,
7918 (comp_code == LE_EXPR
7919 ? comp_op0 : comp_op1),
7920 (comp_code == LE_EXPR
7921 ? comp_op1 : comp_op0)))));
7922 break;
7923 case GE_EXPR:
7924 case GT_EXPR:
7925 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
7926 return pedantic_non_lvalue
7927 (convert (type, fold (build (MAX_EXPR, comp_type,
7928 (comp_code == GE_EXPR
7929 ? comp_op0 : comp_op1),
7930 (comp_code == GE_EXPR
7931 ? comp_op1 : comp_op0)))));
7932 break;
7933 default:
7934 abort ();
7935 }
7936 }
7937
7938 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
7939 we might still be able to simplify this. For example,
7940 if C1 is one less or one more than C2, this might have started
7941 out as a MIN or MAX and been transformed by this function.
7942 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
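                /* E.g. x < 3 ? x : 2 has C1 == C2 + 1 and therefore folds to
                   MIN_EXPR (x, 2) below.  */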
7943
7944 if (INTEGRAL_TYPE_P (type)
7945 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7946 && TREE_CODE (arg2) == INTEGER_CST)
7947 switch (comp_code)
7948 {
7949 case EQ_EXPR:
7950 /* We can replace A with C1 in this case. */
7951 arg1 = convert (type, TREE_OPERAND (arg0, 1));
7952 t = build (code, type, TREE_OPERAND (t, 0), arg1,
7953 TREE_OPERAND (t, 2));
7954 break;
7955
7956 case LT_EXPR:
7957 /* If C1 is C2 + 1, this is min(A, C2). */
7958 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
7959 && operand_equal_p (TREE_OPERAND (arg0, 1),
7960 const_binop (PLUS_EXPR, arg2,
7961 integer_one_node, 0), 1))
7962 return pedantic_non_lvalue
7963 (fold (build (MIN_EXPR, type, arg1, arg2)));
7964 break;
7965
7966 case LE_EXPR:
7967 /* If C1 is C2 - 1, this is min(A, C2). */
7968 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
7969 && operand_equal_p (TREE_OPERAND (arg0, 1),
7970 const_binop (MINUS_EXPR, arg2,
7971 integer_one_node, 0), 1))
7972 return pedantic_non_lvalue
7973 (fold (build (MIN_EXPR, type, arg1, arg2)));
7974 break;
7975
7976 case GT_EXPR:
7977 /* If C1 is C2 - 1, this is max(A, C2). */
7978 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
7979 && operand_equal_p (TREE_OPERAND (arg0, 1),
7980 const_binop (MINUS_EXPR, arg2,
7981 integer_one_node, 0), 1))
7982 return pedantic_non_lvalue
7983 (fold (build (MAX_EXPR, type, arg1, arg2)));
7984 break;
7985
7986 case GE_EXPR:
7987 /* If C1 is C2 + 1, this is max(A, C2). */
7988 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
7989 && operand_equal_p (TREE_OPERAND (arg0, 1),
7990 const_binop (PLUS_EXPR, arg2,
7991 integer_one_node, 0), 1))
7992 return pedantic_non_lvalue
7993 (fold (build (MAX_EXPR, type, arg1, arg2)));
7994 break;
7995 case NE_EXPR:
7996 break;
7997 default:
7998 abort ();
7999 }
8000 }
8001
8002 /* If the second operand is simpler than the third, swap them
8003 since that produces better jump optimization results. */
8004 if ((TREE_CONSTANT (arg1) || DECL_P (arg1)
8005 || TREE_CODE (arg1) == SAVE_EXPR)
8006 && ! (TREE_CONSTANT (TREE_OPERAND (t, 2))
8007 || DECL_P (TREE_OPERAND (t, 2))
8008 || TREE_CODE (TREE_OPERAND (t, 2)) == SAVE_EXPR))
8009 {
8010 /* See if this can be inverted. If it can't, possibly because
8011 it was a floating-point inequality comparison, don't do
8012 anything. */
8013 tem = invert_truthvalue (arg0);
8014
8015 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8016 {
8017 t = build (code, type, tem,
8018 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
8019 arg0 = tem;
8020 /* arg1 should be the first argument of the new T. */
8021 arg1 = TREE_OPERAND (t, 1);
8022 STRIP_NOPS (arg1);
8023 }
8024 }
8025
8026 /* Convert A ? 1 : 0 to simply A. */
8027 if (integer_onep (TREE_OPERAND (t, 1))
8028 && integer_zerop (TREE_OPERAND (t, 2))
8029 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
8030 call to fold will try to move the conversion inside
8031 a COND, which will recurse. In that case, the COND_EXPR
8032 is probably the best choice, so leave it alone. */
8033 && type == TREE_TYPE (arg0))
8034 return pedantic_non_lvalue (arg0);
8035
8036 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
8037 over COND_EXPR in cases such as floating point comparisons. */
8038 if (integer_zerop (TREE_OPERAND (t, 1))
8039 && integer_onep (TREE_OPERAND (t, 2))
8040 && truth_value_p (TREE_CODE (arg0)))
8041 return pedantic_non_lvalue (convert (type,
8042 invert_truthvalue (arg0)));
8043
8044 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
8045 operation is simply A & 2. */
8046
8047 if (integer_zerop (TREE_OPERAND (t, 2))
8048 && TREE_CODE (arg0) == NE_EXPR
8049 && integer_zerop (TREE_OPERAND (arg0, 1))
8050 && integer_pow2p (arg1)
8051 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
8052 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
8053 arg1, 1))
8054 return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));
8055
8056 /* Convert A ? B : 0 into A && B if A and B are truth values. */
8057 if (integer_zerop (TREE_OPERAND (t, 2))
8058 && truth_value_p (TREE_CODE (arg0))
8059 && truth_value_p (TREE_CODE (arg1)))
8060 return pedantic_non_lvalue (fold (build (TRUTH_ANDIF_EXPR, type,
8061 arg0, arg1)));
8062
8063 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
8064 if (integer_onep (TREE_OPERAND (t, 2))
8065 && truth_value_p (TREE_CODE (arg0))
8066 && truth_value_p (TREE_CODE (arg1)))
8067 {
8068 /* Only perform transformation if ARG0 is easily inverted. */
8069 tem = invert_truthvalue (arg0);
8070 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8071 return pedantic_non_lvalue (fold (build (TRUTH_ORIF_EXPR, type,
8072 tem, arg1)));
8073 }
8074
8075 return t;
8076
8077 case COMPOUND_EXPR:
8078 /* When pedantic, a compound expression can be neither an lvalue
8079 nor an integer constant expression. */
8080 if (TREE_SIDE_EFFECTS (arg0) || pedantic)
8081 return t;
8082       /* Don't let (0, 0) be a null pointer constant.  */
8083 if (integer_zerop (arg1))
8084 return build1 (NOP_EXPR, type, arg1);
8085 return convert (type, arg1);
8086
8087 case COMPLEX_EXPR:
8088 if (wins)
8089 return build_complex (type, arg0, arg1);
8090 return t;
8091
8092 case REALPART_EXPR:
8093 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8094 return t;
8095 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8096 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8097 TREE_OPERAND (arg0, 1));
8098 else if (TREE_CODE (arg0) == COMPLEX_CST)
8099 return TREE_REALPART (arg0);
8100 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8101 return fold (build (TREE_CODE (arg0), type,
8102 fold (build1 (REALPART_EXPR, type,
8103 TREE_OPERAND (arg0, 0))),
8104 fold (build1 (REALPART_EXPR,
8105 type, TREE_OPERAND (arg0, 1)))));
8106 return t;
8107
8108 case IMAGPART_EXPR:
8109 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8110 return convert (type, integer_zero_node);
8111 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8112 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8113 TREE_OPERAND (arg0, 0));
8114 else if (TREE_CODE (arg0) == COMPLEX_CST)
8115 return TREE_IMAGPART (arg0);
8116 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8117 return fold (build (TREE_CODE (arg0), type,
8118 fold (build1 (IMAGPART_EXPR, type,
8119 TREE_OPERAND (arg0, 0))),
8120 fold (build1 (IMAGPART_EXPR, type,
8121 TREE_OPERAND (arg0, 1)))));
8122 return t;
8123
8124 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
8125 appropriate. */
8126 case CLEANUP_POINT_EXPR:
8127 if (! has_cleanups (arg0))
8128 return TREE_OPERAND (t, 0);
8129
8130 {
8131 enum tree_code code0 = TREE_CODE (arg0);
8132 int kind0 = TREE_CODE_CLASS (code0);
8133 tree arg00 = TREE_OPERAND (arg0, 0);
8134 tree arg01;
8135
8136 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
8137 return fold (build1 (code0, type,
8138 fold (build1 (CLEANUP_POINT_EXPR,
8139 TREE_TYPE (arg00), arg00))));
8140
8141 if (kind0 == '<' || kind0 == '2'
8142 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
8143 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
8144 || code0 == TRUTH_XOR_EXPR)
8145 {
8146 arg01 = TREE_OPERAND (arg0, 1);
8147
8148 if (TREE_CONSTANT (arg00)
8149 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
8150 && ! has_cleanups (arg00)))
8151 return fold (build (code0, type, arg00,
8152 fold (build1 (CLEANUP_POINT_EXPR,
8153 TREE_TYPE (arg01), arg01))));
8154
8155 if (TREE_CONSTANT (arg01))
8156 return fold (build (code0, type,
8157 fold (build1 (CLEANUP_POINT_EXPR,
8158 TREE_TYPE (arg00), arg00)),
8159 arg01));
8160 }
8161
8162 return t;
8163 }
8164
8165 case CALL_EXPR:
8166 /* Check for a built-in function. */
8167 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
8168 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (expr, 0), 0))
8169 == FUNCTION_DECL)
8170 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
8171 {
8172 tree tmp = fold_builtin (expr);
8173 if (tmp)
8174 return tmp;
8175 }
8176 return t;
8177
8178 default:
8179 return t;
8180 } /* switch (code) */
8181 }
8182
8183 #ifdef ENABLE_FOLD_CHECKING
8184 #undef fold
8185
8186 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
8187 static void fold_check_failed (tree, tree);
8188 void print_fold_checksum (tree);
8189
8190 /* When --enable-checking=fold, compute a digest of expr before
8191    and after the actual fold call to see whether fold accidentally
8192    changed the original expr.  */
8193
8194 tree
8195 fold (tree expr)
8196 {
8197 tree ret;
8198 struct md5_ctx ctx;
8199 unsigned char checksum_before[16], checksum_after[16];
8200 htab_t ht;
8201
8202 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8203 md5_init_ctx (&ctx);
8204 fold_checksum_tree (expr, &ctx, ht);
8205 md5_finish_ctx (&ctx, checksum_before);
8206 htab_empty (ht);
8207
8208 ret = fold_1 (expr);
8209
8210 md5_init_ctx (&ctx);
8211 fold_checksum_tree (expr, &ctx, ht);
8212 md5_finish_ctx (&ctx, checksum_after);
8213 htab_delete (ht);
8214
8215 if (memcmp (checksum_before, checksum_after, 16))
8216 fold_check_failed (expr, ret);
8217
8218 return ret;
8219 }
8220
8221 void
8222 print_fold_checksum (tree expr)
8223 {
8224 struct md5_ctx ctx;
8225 unsigned char checksum[16], cnt;
8226 htab_t ht;
8227
8228 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8229 md5_init_ctx (&ctx);
8230 fold_checksum_tree (expr, &ctx, ht);
8231 md5_finish_ctx (&ctx, checksum);
8232 htab_delete (ht);
8233 for (cnt = 0; cnt < 16; ++cnt)
8234 fprintf (stderr, "%02x", checksum[cnt]);
8235 putc ('\n', stderr);
8236 }
8237
8238 static void
8239 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
8240 {
8241 internal_error ("fold check: original tree changed by fold");
8242 }
8243
8244 static void
8245 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
8246 {
8247 void **slot;
8248 enum tree_code code;
8249 char buf[sizeof (struct tree_decl)];
8250 int i, len;
8251
8252 if (sizeof (struct tree_exp) + 5 * sizeof (tree)
8253 > sizeof (struct tree_decl)
8254 || sizeof (struct tree_type) > sizeof (struct tree_decl))
8255 abort ();
8256 if (expr == NULL)
8257 return;
8258 slot = htab_find_slot (ht, expr, INSERT);
8259 if (*slot != NULL)
8260 return;
8261 *slot = expr;
8262 code = TREE_CODE (expr);
8263 if (code == SAVE_EXPR && SAVE_EXPR_NOPLACEHOLDER (expr))
8264 {
8265 /* Allow SAVE_EXPR_NOPLACEHOLDER flag to be modified. */
8266 memcpy (buf, expr, tree_size (expr));
8267 expr = (tree) buf;
8268 SAVE_EXPR_NOPLACEHOLDER (expr) = 0;
8269 }
8270 else if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
8271 {
8272 /* Allow DECL_ASSEMBLER_NAME to be modified. */
8273 memcpy (buf, expr, tree_size (expr));
8274 expr = (tree) buf;
8275 SET_DECL_ASSEMBLER_NAME (expr, NULL);
8276 }
8277 else if (TREE_CODE_CLASS (code) == 't'
8278 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
8279 {
8280 /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified. */
8281 memcpy (buf, expr, tree_size (expr));
8282 expr = (tree) buf;
8283 TYPE_POINTER_TO (expr) = NULL;
8284 TYPE_REFERENCE_TO (expr) = NULL;
8285 }
8286 md5_process_bytes (expr, tree_size (expr), ctx);
8287 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
8288 if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
8289 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
8290 len = TREE_CODE_LENGTH (code);
8291 switch (TREE_CODE_CLASS (code))
8292 {
8293 case 'c':
8294 switch (code)
8295 {
8296 case STRING_CST:
8297 md5_process_bytes (TREE_STRING_POINTER (expr),
8298 TREE_STRING_LENGTH (expr), ctx);
8299 break;
8300 case COMPLEX_CST:
8301 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
8302 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
8303 break;
8304 case VECTOR_CST:
8305 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
8306 break;
8307 default:
8308 break;
8309 }
8310 break;
8311 case 'x':
8312 switch (code)
8313 {
8314 case TREE_LIST:
8315 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
8316 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
8317 break;
8318 case TREE_VEC:
8319 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
8320 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
8321 break;
8322 default:
8323 break;
8324 }
8325 break;
8326 case 'e':
8327 switch (code)
8328 {
8329 case SAVE_EXPR: len = 2; break;
8330 case GOTO_SUBROUTINE_EXPR: len = 0; break;
8331 case RTL_EXPR: len = 0; break;
8332 case WITH_CLEANUP_EXPR: len = 2; break;
8333 default: break;
8334 }
8335 /* FALLTHROUGH */
8336 case 'r':
8337 case '<':
8338 case '1':
8339 case '2':
8340 case 's':
8341 for (i = 0; i < len; ++i)
8342 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
8343 break;
8344 case 'd':
8345 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
8346 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
8347 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
8348 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
8349 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
8350 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
8351 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
8352 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
8353 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
8354 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
8355 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
8356 break;
8357 case 't':
8358 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
8359 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
8360 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
8361 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
8362 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
8363 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
8364 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
8365 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
8366 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
8367 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
8368 break;
8369 default:
8370 break;
8371 }
8372 }
8373
8374 #endif
8375
8376 /* Perform constant folding and related simplification of initializer
8377 expression EXPR. This behaves identically to "fold" but ignores
8378 potential run-time traps and exceptions that fold must preserve. */
8379
8380 tree
8381 fold_initializer (tree expr)
8382 {
8383 int saved_signaling_nans = flag_signaling_nans;
8384 int saved_trapping_math = flag_trapping_math;
8385 int saved_trapv = flag_trapv;
8386 tree result;
8387
8388 flag_signaling_nans = 0;
8389 flag_trapping_math = 0;
8390 flag_trapv = 0;
8391
8392 result = fold (expr);
8393
8394 flag_signaling_nans = saved_signaling_nans;
8395 flag_trapping_math = saved_trapping_math;
8396 flag_trapv = saved_trapv;
8397
8398 return result;
8399 }
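
/* As an illustration (a hypothetical sketch, not code from any front
end), a static initializer could be folded with

tree t = build (RDIV_EXPR, double_type_node, one, zero);
t = fold_initializer (t);

where ONE and ZERO are REAL_CST nodes for 1.0 and 0.0. Under
-ftrapping-math, fold itself must leave 1.0 / 0.0 alone, since
evaluating it at run time would raise a divide-by-zero exception;
an initializer, however, is computed exactly once before the program
runs, so fold_initializer may evaluate it (here to +Inf) without
losing a user-visible trap. */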
8400
8401 /* Determine if first argument is a multiple of second argument. Return 0 if
8402 it is not, or if we cannot easily determine it to be.
8403
8404 An example of the sort of thing we care about (at this point; this routine
8405 could surely be made more general, and expanded to do what the *_DIV_EXPR's
8406 fold cases do now) is discovering that
8407
8408 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8409
8410 is a multiple of
8411
8412 SAVE_EXPR (J * 8)
8413
8414 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
8415
8416 This code also handles discovering that
8417
8418 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8419
8420 is a multiple of 8 so we don't have to worry about dealing with a
8421 possible remainder.
8422
8423 Note that we *look* inside a SAVE_EXPR only to determine how it was
8424 calculated; it is not safe for fold to do much of anything else with the
8425 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
8426 at run time. For example, the latter example above *cannot* be implemented
8427 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
8428 evaluation time of the original SAVE_EXPR is not necessarily the same at
8429 the time the new expression is evaluated. The only optimization of this
8430 sort that would be valid is changing
8431
8432 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
8433
8434 divided by 8 to
8435
8436 SAVE_EXPR (I) * SAVE_EXPR (J)
8437
8438 (where the same SAVE_EXPR (J) is used in the original and the
8439 transformed version). */
8440
8441 static int
8442 multiple_of_p (tree type, tree top, tree bottom)
8443 {
8444 if (operand_equal_p (top, bottom, 0))
8445 return 1;
8446
8447 if (TREE_CODE (type) != INTEGER_TYPE)
8448 return 0;
8449
8450 switch (TREE_CODE (top))
8451 {
8452 case MULT_EXPR:
8453 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8454 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8455
8456 case PLUS_EXPR:
8457 case MINUS_EXPR:
8458 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8459 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8460
8461 case LSHIFT_EXPR:
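/* TOP is X << C, i.e. X * (1 << C), so it is a multiple of
BOTTOM whenever 1 << C is. */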
8462 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
8463 {
8464 tree op1, t1;
8465
8466 op1 = TREE_OPERAND (top, 1);
8467 /* const_binop may not detect overflow correctly,
8468 so check for it explicitly here. */
8469 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
8470 > TREE_INT_CST_LOW (op1)
8471 && TREE_INT_CST_HIGH (op1) == 0
8472 && 0 != (t1 = convert (type,
8473 const_binop (LSHIFT_EXPR, size_one_node,
8474 op1, 0)))
8475 && ! TREE_OVERFLOW (t1))
8476 return multiple_of_p (type, t1, bottom);
8477 }
8478 return 0;
8479
8480 case NOP_EXPR:
8481 /* Can't handle conversions from a non-integral or wider integral type. */
8482 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
8483 || (TYPE_PRECISION (type)
8484 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
8485 return 0;
8486
8487 /* ... fall through ... */
8488
8489 case SAVE_EXPR:
8490 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
8491
8492 case INTEGER_CST:
8493 if (TREE_CODE (bottom) != INTEGER_CST
8494 || (TREE_UNSIGNED (type)
8495 && (tree_int_cst_sgn (top) < 0
8496 || tree_int_cst_sgn (bottom) < 0)))
8497 return 0;
8498 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
8499 top, bottom, 0));
8500
8501 default:
8502 return 0;
8503 }
8504 }
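
/* As a worked example (hypothetical trees, for exposition only): with
TOP the tree for (J * 8) + 16 and BOTTOM the INTEGER_CST 8, the
MULT_EXPR arm succeeds because its second operand equals BOTTOM, the
INTEGER_CST arm succeeds because 16 % 8 == 0, and PLUS_EXPR requires
both, so multiple_of_p returns 1. For (J * 8) + 4 it returns 0,
since 4 % 8 != 0. */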
8505
8506 /* Return true if `t' is known to be non-negative. */
8507
8508 int
8509 tree_expr_nonnegative_p (tree t)
8510 {
8511 switch (TREE_CODE (t))
8512 {
8513 case ABS_EXPR:
8514 case FFS_EXPR:
8515 case POPCOUNT_EXPR:
8516 case PARITY_EXPR:
8517 return 1;
8518
8519 case CLZ_EXPR:
8520 case CTZ_EXPR:
8521 /* These are undefined at zero. This is true even if
8522 C[LT]Z_DEFINED_VALUE_AT_ZERO is set, since what we're
8523 computing here is a user-visible property. */
8524 return 0;
8525
8526 case INTEGER_CST:
8527 return tree_int_cst_sgn (t) >= 0;
8528
8529 case REAL_CST:
8530 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
8531
8532 case PLUS_EXPR:
8533 if (FLOAT_TYPE_P (TREE_TYPE (t)))
8534 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8535 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8536
8537 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
8538 both unsigned and at least 2 bits shorter than the result. */
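/* For instance, two 8-bit values zero-extended into a 32-bit sum:
each addend is at most 255, so the sum is at most 510, which
fits in 10 bits and leaves the sign bit clear. */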
8539 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8540 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8541 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8542 {
8543 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8544 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8545 if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
8546 && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
8547 {
8548 unsigned int prec = MAX (TYPE_PRECISION (inner1),
8549 TYPE_PRECISION (inner2)) + 1;
8550 return prec < TYPE_PRECISION (TREE_TYPE (t));
8551 }
8552 }
8553 break;
8554
8555 case MULT_EXPR:
8556 if (FLOAT_TYPE_P (TREE_TYPE (t)))
8557 {
8558 /* x * x for floating point x is always non-negative. */
8559 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
8560 return 1;
8561 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8562 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8563 }
8564
8565 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
8566 both unsigned and the sum of their precisions is less than that of the result. */
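/* For instance, two 8-bit values zero-extended and multiplied in a
32-bit type: the product is at most 255 * 255 = 65025, which fits
in 16 bits, so the sign bit stays clear. */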
8567 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8568 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8569 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8570 {
8571 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8572 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8573 if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
8574 && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
8575 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
8576 < TYPE_PRECISION (TREE_TYPE (t));
8577 }
8578 return 0;
8579
8580 case TRUNC_DIV_EXPR:
8581 case CEIL_DIV_EXPR:
8582 case FLOOR_DIV_EXPR:
8583 case ROUND_DIV_EXPR:
8584 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8585 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8586
8587 case TRUNC_MOD_EXPR:
8588 case CEIL_MOD_EXPR:
8589 case FLOOR_MOD_EXPR:
8590 case ROUND_MOD_EXPR:
8591 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8592
8593 case RDIV_EXPR:
8594 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8595 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8596
8597 case NOP_EXPR:
8598 {
8599 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
8600 tree outer_type = TREE_TYPE (t);
8601
8602 if (TREE_CODE (outer_type) == REAL_TYPE)
8603 {
8604 if (TREE_CODE (inner_type) == REAL_TYPE)
8605 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8606 if (TREE_CODE (inner_type) == INTEGER_TYPE)
8607 {
8608 if (TREE_UNSIGNED (inner_type))
8609 return 1;
8610 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8611 }
8612 }
8613 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
8614 {
8615 if (TREE_CODE (inner_type) == REAL_TYPE)
8616 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8617 if (TREE_CODE (inner_type) == INTEGER_TYPE)
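/* A conversion from a strictly narrower unsigned type
zero-extends, which cannot produce a negative value. */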
8618 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
8619 && TREE_UNSIGNED (inner_type);
8620 }
8621 }
8622 break;
8623
8624 case COND_EXPR:
8625 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
8626 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
8627 case COMPOUND_EXPR:
8628 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8629 case MIN_EXPR:
8630 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8631 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8632 case MAX_EXPR:
8633 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8634 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8635 case MODIFY_EXPR:
8636 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8637 case BIND_EXPR:
8638 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8639 case SAVE_EXPR:
8640 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8641 case NON_LVALUE_EXPR:
8642 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8643 case FLOAT_EXPR:
8644 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8645 case RTL_EXPR:
8646 return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));
8647
8648 case CALL_EXPR:
8649 {
8650 tree fndecl = get_callee_fndecl (t);
8651 tree arglist = TREE_OPERAND (t, 1);
8652 if (fndecl
8653 && DECL_BUILT_IN (fndecl)
8654 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
8655 switch (DECL_FUNCTION_CODE (fndecl))
8656 {
8657 case BUILT_IN_CABS:
8658 case BUILT_IN_CABSL:
8659 case BUILT_IN_CABSF:
8660 case BUILT_IN_EXP:
8661 case BUILT_IN_EXPF:
8662 case BUILT_IN_EXPL:
8663 case BUILT_IN_FABS:
8664 case BUILT_IN_FABSF:
8665 case BUILT_IN_FABSL:
8666 case BUILT_IN_SQRT:
8667 case BUILT_IN_SQRTF:
8668 case BUILT_IN_SQRTL:
8669 return 1;
8670
8671 case BUILT_IN_ATAN:
8672 case BUILT_IN_ATANF:
8673 case BUILT_IN_ATANL:
8674 case BUILT_IN_CEIL:
8675 case BUILT_IN_CEILF:
8676 case BUILT_IN_CEILL:
8677 case BUILT_IN_FLOOR:
8678 case BUILT_IN_FLOORF:
8679 case BUILT_IN_FLOORL:
8680 case BUILT_IN_NEARBYINT:
8681 case BUILT_IN_NEARBYINTF:
8682 case BUILT_IN_NEARBYINTL:
8683 case BUILT_IN_ROUND:
8684 case BUILT_IN_ROUNDF:
8685 case BUILT_IN_ROUNDL:
8686 case BUILT_IN_TRUNC:
8687 case BUILT_IN_TRUNCF:
8688 case BUILT_IN_TRUNCL:
8689 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
8690
8691 case BUILT_IN_POW:
8692 case BUILT_IN_POWF:
8693 case BUILT_IN_POWL:
8694 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
8695
8696 default:
8697 break;
8698 }
8699 }
8700
8701 /* ... fall through ... */
8702
8703 default:
8704 if (truth_value_p (TREE_CODE (t)))
8705 /* Truth values evaluate to 0 or 1, which is nonnegative. */
8706 return 1;
8707 }
8708
8709 /* We don't know the sign of `t', so be conservative and return false. */
8710 return 0;
8711 }
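
/* A typical use (sketched here with a hypothetical caller): when
folding an absolute value,

if (tree_expr_nonnegative_p (arg))
return arg;

drops the ABS_EXPR entirely, since |x| == x for non-negative x. */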
8712
8713 /* Return true if `r' is known to be non-negative.
8714 Only handles constants at the moment. */
8715
8716 int
8717 rtl_expr_nonnegative_p (rtx r)
8718 {
8719 switch (GET_CODE (r))
8720 {
8721 case CONST_INT:
8722 return INTVAL (r) >= 0;
8723
8724 case CONST_DOUBLE:
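/* A CONST_DOUBLE with VOIDmode represents a two-word integer, whose
sign is that of the high word; floating-point CONST_DOUBLEs are
conservatively left alone. */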
8725 if (GET_MODE (r) == VOIDmode)
8726 return CONST_DOUBLE_HIGH (r) >= 0;
8727 return 0;
8728
8729 case CONST_VECTOR:
8730 {
8731 int units, i;
8732 rtx elt;
8733
8734 units = CONST_VECTOR_NUNITS (r);
8735
8736 for (i = 0; i < units; ++i)
8737 {
8738 elt = CONST_VECTOR_ELT (r, i);
8739 if (!rtl_expr_nonnegative_p (elt))
8740 return 0;
8741 }
8742
8743 return 1;
8744 }
8745
8746 case SYMBOL_REF:
8747 case LABEL_REF:
8748 /* These are always non-negative. */
8749 return 1;
8750
8751 default:
8752 return 0;
8753 }
8754 }
8755
8756 #include "gt-fold-const.h"