/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision etc. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant and prior overflow indicator, and
   forces the value to fit the type.  It returns an overflow indicator.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"

static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree int_const_binop (enum tree_code, tree, tree, int);
static tree const_binop (enum tree_code, tree, tree, int);
static hashval_t size_htab_hash (const void *);
static int size_htab_eq (const void *, const void *);
static tree fold_convert (tree, tree);
static enum tree_code invert_tree_comparison (enum tree_code);
static enum tree_code swap_tree_comparison (enum tree_code);
static int comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (int);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static tree strip_compound_expr (tree, tree);
static int multiple_of_p (tree, tree, tree);
static tree constant_boolean_node (int, tree);
static int count_cond (tree, int);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
                                                 tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
#define COMPCODE_FALSE   0
#define COMPCODE_LT      1
#define COMPCODE_EQ      2
#define COMPCODE_LE      3
#define COMPCODE_GT      4
#define COMPCODE_NE      5
#define COMPCODE_GE      6
#define COMPCODE_TRUE    7

/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
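
/* Illustrative sketch (not part of the original source): with 32-bit
   HOST_WIDE_INTs, adding a = 0x7fffffff and b = 1 wraps to
   sum = 0x80000000.  Then ~(a ^ b) = 0x80000001 and a ^ sum = 0xffffffff,
   so the macro's AND is 0x80000001, which is negative, and
   OVERFLOW_SUM_SIGN reports the signed overflow.  When a and b have
   opposite signs no overflow is possible, and ~(a ^ b) then masks off
   the sign bit, so the macro yields zero.  */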
\f
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of each original word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)

/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
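
/* Illustrative sketch (not part of the original source): on a host with
   32-bit HOST_WIDE_INTs, BASE is 0x10000 and an encode/decode round trip
   looks like this:

     HOST_WIDE_INT words[4];
     unsigned HOST_WIDE_INT lo;
     HOST_WIDE_INT hi;

     encode (words, 0x12345678, 0x0000abcd);
     // words[0] == 0x5678, words[1] == 0x1234  (low word, least
     //                                          significant half first)
     // words[2] == 0xabcd, words[3] == 0x0000  (high word)
     decode (words, &lo, &hi);
     // lo == 0x12345678 and hi == 0x0000abcd again

   Keeping each half-word as a positive value lets the multiply and
   divide routines below propagate carries with ordinary unsigned
   arithmetic.  */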
\f
/* Make the integer constant T valid for its type by setting to 0 or 1 all
   the bits in the constant that don't belong in the type.

   Return 1 if a signed overflow occurs, 0 otherwise.  If OVERFLOW is
   nonzero, a signed overflow has already occurred in calculating T, so
   propagate it.  */

int
force_fit_type (tree t, int overflow)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;

  if (TREE_CODE (t) == REAL_CST)
    {
      /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
         Consider doing it via real_convert now.  */
      return overflow;
    }

  else if (TREE_CODE (t) != INTEGER_CST)
    return overflow;

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec == 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    TREE_INT_CST_HIGH (t)
      &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      TREE_INT_CST_HIGH (t) = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
    }

  /* Unsigned types do not suffer sign extension or overflow unless they
     are a sizetype.  */
  if (TREE_UNSIGNED (TREE_TYPE (t))
      && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
            && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
    return overflow;

  /* If the value's sign bit is set, extend the sign.  */
  if (prec != 2 * HOST_BITS_PER_WIDE_INT
      && (prec > HOST_BITS_PER_WIDE_INT
          ? 0 != (TREE_INT_CST_HIGH (t)
                  & ((HOST_WIDE_INT) 1
                     << (prec - HOST_BITS_PER_WIDE_INT - 1)))
          : 0 != (TREE_INT_CST_LOW (t)
                  & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
    {
      /* Value is negative:
         set to 1 all the bits that are outside this type's precision.  */
      if (prec > HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_HIGH (t)
          |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      else
        {
          TREE_INT_CST_HIGH (t) = -1;
          if (prec < HOST_BITS_PER_WIDE_INT)
            TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
        }
    }

  /* Return nonzero if signed overflow occurred.  */
  return
    ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
     != 0);
}
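
/* Illustrative sketch (not part of the original source): for an
   INTEGER_CST of a signed 8-bit type holding the out-of-range value
   0x1ff, force_fit_type first clears everything above bit 7, leaving
   0xff.  Bit 7 is now set, so the sign is extended and the node ends up
   representing -1 (low part all ones, high part -1).  Because the stored
   bits changed, the function returns nonzero to flag a signed overflow.
   For an unsigned (non-sizetype) 8-bit type it would stop after the
   masking step and just propagate OVERFLOW.  */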
\f
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}

/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
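
/* Illustrative note (not part of the original source): the only value
   whose negation overflows is the most negative doubleword, l1 == 0 and
   h1 == the minimum HOST_WIDE_INT.  Negating it yields itself, so the
   check (*hv & h1) < 0 (both the input and the "negated" high word still
   have the sign bit set) catches exactly that case.  When l1 != 0 the
   borrow stops inside the low word, so *hv is simply ~h1 (that is,
   -h1 - 1) and no overflow is possible.  */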
\f
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
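
/* Illustrative sketch (not part of the original source): this is
   schoolbook multiplication in base BASE.  With 32-bit HOST_WIDE_INTs,
   multiplying 0x10001 by 0x10001 encodes both operands as {1, 1, 0, 0}
   and accumulates prod = {1, 2, 1, 0, ...}, so *lv == 0x20001 and
   *hv == 0x1, i.e. the doubleword 0x100020001.  For the signed overflow
   test, a negative operand read as unsigned makes the top half of the
   raw product too large by exactly the value of the other operand, so
   the code adds that operand's negation to the top half; the corrected
   top half must then be all zeros (nonnegative result) or all ones
   (negative result).  */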
\f
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
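
/* Illustrative note (not part of the original source): the two-step
   shift

     l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1

   computes l1 >> (HOST_BITS_PER_WIDE_INT - count) without ever shifting
   by a full word width, which C leaves undefined when count == 0.  The
   same idiom appears in rshift_double below with the roles of the two
   halves swapped.  */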

/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT) prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
\f
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
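
/* Illustrative note (not part of the original source): both rotates use
   the identity rol (x, n) == (x << n) | (x >> (prec - n)), built from
   logical shifts so no sign bits leak in.  E.g. for prec == 8 and
   x == 0x81, rotating left by 1 gives 0x02 | 0x01 == 0x03.  */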
\f
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero the extra (5th) element used for scaling */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {			/* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0)
                den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order divisor digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is digit I of the quotient.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;		/* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num[num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      abort ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
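
/* Illustrative sketch (not part of the original source): for -7 / 2 the
   rounding modes give

     TRUNC_DIV_EXPR   quo = -3, rem = -1   (toward zero)
     FLOOR_DIV_EXPR   quo = -4, rem =  1   (toward -infinity)
     CEIL_DIV_EXPR    quo = -3, rem = -1   (toward +infinity)
     ROUND_DIV_EXPR   quo = -4, rem =  1   (2*|rem| >= |den|, so the
                                            half-way case moves away
                                            from zero)

   and in every mode the identity num == quo * den + rem holds for the
   values finally stored in *LQUO/*HQUO and *LREM/*HREM.  */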
\f
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TREE_UNSIGNED (type))
        return false;

      /* Check that -CST will not overflow type.  */
      prec = TYPE_PRECISION (type);
      if (prec > HOST_BITS_PER_WIDE_INT)
        {
          if (TREE_INT_CST_LOW (t) != 0)
            return true;
          prec -= HOST_BITS_PER_WIDE_INT;
          val = TREE_INT_CST_HIGH (t);
        }
      else
        val = TREE_INT_CST_LOW (t);
      if (prec < HOST_BITS_PER_WIDE_INT)
        val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
      return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));

    case REAL_CST:
    case NEGATE_EXPR:
    case MINUS_EXPR:
      return true;

    default:
      break;
    }
  return false;
}
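
/* Illustrative note (not part of the original source): the INTEGER_CST
   case rejects exactly the most negative value of the type, since e.g.
   for 32-bit int, -(-2147483648) is not representable.  For that value
   the bits kept by the precision mask equal 1 << (prec - 1), so the
   final comparison returns false; every other signed constant negates
   safely.  */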

/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
    case REAL_CST:
      if (! TREE_UNSIGNED (type)
          && 0 != (tem = fold (build1 (NEGATE_EXPR, type, t)))
          && ! TREE_OVERFLOW (tem))
        return tem;
      break;

    case NEGATE_EXPR:
      return convert (type, TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        return convert (type,
                        fold (build (MINUS_EXPR, TREE_TYPE (t),
                                     TREE_OPERAND (t, 1),
                                     TREE_OPERAND (t, 0))));
      break;

    default:
      break;
    }

  return convert (type, fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t)));
}
\f
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
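
/* Illustrative sketch (not part of the original source): splitting
   IN = a - 4 with CODE == PLUS_EXPR and NEGATE_P == 0 finds the literal
   4 on the subtracted side, so it is stored through *MINUS_LITP, *LITP
   and *CONP stay null, and the variable part a is returned.  A caller
   reassociating a sum can then rebuild the expression as
   (a + other parts) - 4.  */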

/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build (MINUS_EXPR, type, convert (type, t2),
                          convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build (MINUS_EXPR, type, convert (type, t1),
                          convert (type, TREE_OPERAND (t2, 0)));
        }
      return build (code, type, convert (type, t1), convert (type, t2));
    }

  return fold (build (code, type, convert (type, t1), convert (type, t2)));
}
\f
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TREE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;
  int no_overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case BIT_ANDTC_EXPR:
      low = int1l & ~int2l, hi = int1h & ~int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      abort ();
    }

  /* If this is for a sizetype, the result can be represented as one
     (signed) HOST_WIDE_INT word, and no overflow occurred, use
     size_int_type_wide since it caches constants.  */
  if (is_sizetype
      && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
          || (hi == -1 && (HOST_WIDE_INT) low < 0))
      && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
    return size_int_type_wide (low, type);
  else
    {
      t = build_int_2 (low, hi);
      TREE_TYPE (t) = TREE_TYPE (arg1);
    }

  TREE_OVERFLOW (t)
    = ((notrunc
        ? (!uns || is_sizetype) && overflow
        : (force_fit_type (t, (!uns || is_sizetype) && overflow)
           && ! no_overflow))
       | TREE_OVERFLOW (arg1)
       | TREE_OVERFLOW (arg2));

  /* If we're doing a size calculation, unsigned arithmetic does overflow.
     So check if force_fit_type truncated the value.  */
  if (is_sizetype
      && ! TREE_OVERFLOW (t)
      && (TREE_INT_CST_HIGH (t) != hi
          || TREE_INT_CST_LOW (t) != low))
    TREE_OVERFLOW (t) = 1;

  TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
                                | TREE_CONSTANT_OVERFLOW (arg1)
                                | TREE_CONSTANT_OVERFLOW (arg2));
  return t;
}

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      REAL_ARITHMETIC (value, code, d1, d2);

      t = build_real (type, real_value_truncate (mode, value));

      TREE_OVERFLOW (t)
        = (force_fit_type (t, 0)
           | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t = build_complex (type,
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (PLUS_EXPR,
                                             const_binop (MULT_EXPR, r1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, i1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc),
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (MINUS_EXPR,
                                             const_binop (MULT_EXPR, i1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, r1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc));
          }
          break;

        default:
          abort ();
        }
      return t;
    }
  return 0;
}
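
/* Illustrative note (not part of the original source): the COMPLEX_CST
   cases implement the textbook formulas

     (r1 + i1*i) * (r2 + i2*i) = (r1*r2 - i1*i2) + (r1*i2 + i1*r2)*i

     (r1 + i1*i) / (r2 + i2*i)
        = ((r1*r2 + i1*i2) + (i1*r2 - r1*i2)*i) / (r2*r2 + i2*i2)

   with each component folded recursively through const_binop.  E.g.
   (1 + 2i) / (3 + 4i) evaluates magsquared = 25 and yields
   (11/25) + (2/25)i.  */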

/* These are the hash table functions for the hash table of INTEGER_CST
   nodes of a sizetype.  */

/* Return the hash code for X, an INTEGER_CST.  */

static hashval_t
size_htab_hash (const void *x)
{
  tree t = (tree) x;

  return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
          ^ htab_hash_pointer (TREE_TYPE (t))
          ^ (TREE_OVERFLOW (t) << 20));
}

/* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
   is the same as that given by *Y, also an INTEGER_CST.  */

static int
size_htab_eq (const void *x, const void *y)
{
  tree xt = (tree) x;
  tree yt = (tree) y;

  return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
          && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
          && TREE_TYPE (xt) == TREE_TYPE (yt)
          && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
}
\f
/* Return an INTEGER_CST whose low-order HOST_BITS_PER_WIDE_INT bits are
   given by NUMBER and whose type is the sizetype represented by KIND.  */

tree
size_int_wide (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return size_int_type_wide (number, sizetype_tab[(int) kind]);
}

/* Likewise, but the desired type is specified explicitly.  */

static GTY (()) tree new_const;
static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
     htab_t size_htab;

tree
size_int_type_wide (HOST_WIDE_INT number, tree type)
{
  void **slot;

  if (size_htab == 0)
    {
      size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
      new_const = make_node (INTEGER_CST);
    }

  /* Adjust NEW_CONST to be the constant we want.  If it's already in the
     hash table, we return the value from the hash table.  Otherwise, we
     place that in the hash table and make a new node for the next time.  */
  TREE_INT_CST_LOW (new_const) = number;
  TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
  TREE_TYPE (new_const) = type;
  TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
    = force_fit_type (new_const, 0);

  slot = htab_find_slot (size_htab, new_const, INSERT);
  if (*slot == 0)
    {
      tree t = new_const;

      *slot = new_const;
      new_const = make_node (INTEGER_CST);
      return t;
    }
  else
    return (tree) *slot;
}
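
/* Illustrative note (not part of the original source): this is a
   hash-consing scheme.  NEW_CONST is a scratch INTEGER_CST filled in
   with the requested value; if an equivalent node (same bits, type and
   overflow flag, per size_htab_eq) already sits in SIZE_HTAB, that
   shared node is returned and the scratch node is reused for the next
   call.  Repeated requests such as size_int (4) therefore yield
   pointer-identical trees, which lets callers compare sizes cheaply
   with ==.  */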

/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type and it must be a sizetype.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold (build (code, type, arg0, arg1));
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in the signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* If the type is already signed, just do the simple thing.  */
  if (! TREE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = (type == bitsizetype || type == ubitsizetype
           ? sbitsizetype : ssizetype);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, convert (ctype, arg0),
                       convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, convert (ctype, integer_zero_node),
                       convert (ctype, size_binop (MINUS_EXPR, arg1, arg0)));
}
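
/* Illustrative sketch (not part of the original source): with unsigned
   sizetype operands, size_diffop (size_int (2), size_int (5)) takes the
   constant branch, sees arg1 > arg0, computes 5 - 2 == 3 in the unsigned
   type where it cannot wrap, converts to ssizetype and negates, giving
   -3.  Subtracting directly in the unsigned type first would instead
   wrap to a huge positive value before the conversion.  */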
\f

/* Given T, a tree representing type conversion of ARG1, a constant,
   return a constant tree representing the result of conversion.  */

static tree
fold_convert (tree t, tree arg1)
{
  tree type = TREE_TYPE (t);
  int overflow = 0;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        {
          /* If we would build a constant wider than GCC supports,
             leave the conversion unfolded.  */
          if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
            return t;

          /* If we are trying to make a sizetype for a small integer, use
             size_int to pick up cached types to reduce duplicate nodes.  */
          if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && !TREE_CONSTANT_OVERFLOW (arg1)
              && compare_tree_int (arg1, 10000) < 0)
            return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);

          /* Given an integer constant, make new constant with new type,
             appropriately sign-extended or truncated.  */
          t = build_int_2 (TREE_INT_CST_LOW (arg1),
                           TREE_INT_CST_HIGH (arg1));
          TREE_TYPE (t) = type;
          /* Indicate an overflow if (1) ARG1 already overflowed,
             or (2) force_fit_type indicates an overflow.
             Tell force_fit_type that an overflow has already occurred
             if ARG1 is a too-large unsigned value and T is signed.
             But don't indicate an overflow if converting a pointer.  */
          TREE_OVERFLOW (t)
            = ((force_fit_type (t,
                                (TREE_INT_CST_HIGH (arg1) < 0
                                 && (TREE_UNSIGNED (type)
                                     < TREE_UNSIGNED (TREE_TYPE (arg1)))))
                && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
               || TREE_OVERFLOW (arg1));
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
        }
      else if (TREE_CODE (arg1) == REAL_CST)
        {
          /* Don't initialize these, use assignments.
             Initialized local aggregates don't work on old compilers.  */
          REAL_VALUE_TYPE x;
          REAL_VALUE_TYPE l;
          REAL_VALUE_TYPE u;
          tree type1 = TREE_TYPE (arg1);
          int no_upper_bound;

          x = TREE_REAL_CST (arg1);
          l = real_value_from_int_cst (type1, TYPE_MIN_VALUE (type));

          no_upper_bound = (TYPE_MAX_VALUE (type) == NULL);
          if (!no_upper_bound)
            u = real_value_from_int_cst (type1, TYPE_MAX_VALUE (type));

          /* See if X will be in range after truncation towards 0.
             To compensate for truncation, move the bounds away from 0,
             but reject if X exactly equals the adjusted bounds.  */
          REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
          if (!no_upper_bound)
            REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
          /* If X is a NaN, use zero instead and show we have an overflow.
             Otherwise, range check.  */
          if (REAL_VALUE_ISNAN (x))
            overflow = 1, x = dconst0;
          else if (! (REAL_VALUES_LESS (l, x)
                      && !no_upper_bound
                      && REAL_VALUES_LESS (x, u)))
            overflow = 1;

          {
            HOST_WIDE_INT low, high;
            REAL_VALUE_TO_INT (&low, &high, x);
            t = build_int_2 (low, high);
          }
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t)
            = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
        }
      TREE_TYPE (t) = type;
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        {
          if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
            {
              /* We make a copy of ARG1 so that we don't modify an
                 existing constant tree.  */
              t = copy_node (arg1);
              TREE_TYPE (t) = type;
              return t;
            }

          t = build_real (type,
                          real_value_truncate (TYPE_MODE (type),
                                               TREE_REAL_CST (arg1)));

          TREE_OVERFLOW (t)
            = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
    }
  TREE_CONSTANT (t) = 1;
  return t;
}
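
/* Illustrative sketch (not part of the original source): when folding
   (signed char) 127.9, the bounds for the range check start as
   l == -128.0 and u == 127.0, are widened to -129.0 and 128.0 to allow
   for truncation toward zero, and 127.9 lies strictly inside them, so
   the conversion folds to 127 with no overflow.  For (signed char)
   128.0 the value equals the adjusted upper bound and OVERFLOW is set;
   a NaN input is replaced by zero, likewise with OVERFLOW set.  */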
\f
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue (tree x)
{
  tree result;

  /* These things are certainly not lvalues.  */
  if (TREE_CODE (x) == NON_LVALUE_EXPR
      || TREE_CODE (x) == INTEGER_CST
      || TREE_CODE (x) == REAL_CST
      || TREE_CODE (x) == STRING_CST
      || TREE_CODE (x) == ADDR_EXPR)
    return x;

  result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
  TREE_CONSTANT (result) = TREE_CONSTANT (x);
  return result;
}

/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

tree
pedantic_non_lvalue (tree x)
{
  if (pedantic_lvalues)
    return non_lvalue (x);
  else
    return x;
}
\f
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR.  */

static enum tree_code
invert_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return LE_EXPR;
    case GE_EXPR:
      return LT_EXPR;
    case LT_EXPR:
      return GE_EXPR;
    case LE_EXPR:
      return GT_EXPR;
    default:
      abort ();
    }
}

/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

static enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    default:
      abort ();
    }
}

/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static int
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    default:
      abort ();
    }
}

/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (int code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    default:
      abort ();
    }
}
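
/* Illustrative note (not part of the original source): the encoding
   assigns "less", "equal" and "greater" one bit each (COMPCODE_LT == 1,
   COMPCODE_EQ == 2, COMPCODE_GT == 4), so combining comparisons becomes
   plain bit arithmetic.  For example, folding (a < b) || (a == b) can
   compute COMPCODE_LT | COMPCODE_EQ == 3 == COMPCODE_LE and convert
   back to LE_EXPR, while an AND of incompatible tests yields
   COMPCODE_FALSE.  */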

/* Return nonzero if CODE is a tree code that represents a truth value.  */

static int
truth_value_p (enum tree_code code)
{
  return (TREE_CODE_CLASS (code) == '<'
          || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
          || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
          || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
}
\f
/* Return nonzero if two operands are necessarily equal.
   If ONLY_CONST is nonzero, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.  */

int
operand_equal_p (tree arg0, tree arg1, int only_const)
{
  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  */
  if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
         Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! only_const
      && (TREE_CODE (arg0) == SAVE_EXPR
          || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
        return (! TREE_CONSTANT_OVERFLOW (arg0)
                && ! TREE_CONSTANT_OVERFLOW (arg1)
                && tree_int_cst_equal (arg0, arg1));

      case REAL_CST:
        return (! TREE_CONSTANT_OVERFLOW (arg0)
                && ! TREE_CONSTANT_OVERFLOW (arg1)
                && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
                                          TREE_REAL_CST (arg1)));

      case VECTOR_CST:
        {
          tree v1, v2;

          if (TREE_CONSTANT_OVERFLOW (arg0)
              || TREE_CONSTANT_OVERFLOW (arg1))
            return 0;

          v1 = TREE_VECTOR_CST_ELTS (arg0);
          v2 = TREE_VECTOR_CST_ELTS (arg1);
          while (v1 && v2)
            {
              if (!operand_equal_p (v1, v2, only_const))
                return 0;
              v1 = TREE_CHAIN (v1);
              v2 = TREE_CHAIN (v2);
            }

          return 1;
        }

      case COMPLEX_CST:
        return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
                                 only_const)
                && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
                                    only_const));

      case STRING_CST:
        return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
                && ! memcmp (TREE_STRING_POINTER (arg0),
                             TREE_STRING_POINTER (arg1),
                             TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
        return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
                                0);
      default:
        break;
      }

  if (only_const)
    return 0;

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case '1':
      /* Two conversions are equal only if signedness and modes match.  */
      if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
          && (TREE_UNSIGNED (TREE_TYPE (arg0))
              != TREE_UNSIGNED (TREE_TYPE (arg1))))
        return 0;

      return operand_equal_p (TREE_OPERAND (arg0, 0),
                              TREE_OPERAND (arg1, 0), 0);

    case '<':
    case '2':
      if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
          && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
                              0))
        return 1;

      /* For commutative ops, allow the other order.  */
      return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
               || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
               || TREE_CODE (arg0) == BIT_IOR_EXPR
               || TREE_CODE (arg0) == BIT_XOR_EXPR
               || TREE_CODE (arg0) == BIT_AND_EXPR
               || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 1), 0)
              && operand_equal_p (TREE_OPERAND (arg0, 1),
                                  TREE_OPERAND (arg1, 0), 0));

    case 'r':
      /* If either of the pointer (or reference) expressions we are
         dereferencing contain a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
          || TREE_SIDE_EFFECTS (arg1))
        return 0;

      switch (TREE_CODE (arg0))
        {
        case INDIRECT_REF:
          return operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 0), 0);

        case COMPONENT_REF:
        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          return (operand_equal_p (TREE_OPERAND (arg0, 0),
                                   TREE_OPERAND (arg1, 0), 0)
                  && operand_equal_p (TREE_OPERAND (arg0, 1),
                                      TREE_OPERAND (arg1, 1), 0));

        case BIT_FIELD_REF:
          return (operand_equal_p (TREE_OPERAND (arg0, 0),
                                   TREE_OPERAND (arg1, 0), 0)
                  && operand_equal_p (TREE_OPERAND (arg0, 1),
                                      TREE_OPERAND (arg1, 1), 0)
                  && operand_equal_p (TREE_OPERAND (arg0, 2),
                                      TREE_OPERAND (arg1, 2), 0));
        default:
          return 0;
        }

    case 'e':
      switch (TREE_CODE (arg0))
        {
        case ADDR_EXPR:
        case TRUTH_NOT_EXPR:
          return operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 0), 0);

        case RTL_EXPR:
          return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
1956
1957 case CALL_EXPR:
1958 /* If the CALL_EXPRs call different functions, then they
1959 	     clearly cannot be equal.  */
1960 if (! operand_equal_p (TREE_OPERAND (arg0, 0),
1961 TREE_OPERAND (arg1, 0), 0))
1962 return 0;
1963
1964 /* Only consider const functions equivalent. */
1965 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR)
1966 {
1967 tree fndecl = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
1968 if (! (flags_from_decl_or_type (fndecl) & ECF_CONST))
1969 return 0;
1970 }
1971 else
1972 return 0;
1973
1974 /* Now see if all the arguments are the same. operand_equal_p
1975 does not handle TREE_LIST, so we walk the operands here
1976 feeding them to operand_equal_p. */
1977 arg0 = TREE_OPERAND (arg0, 1);
1978 arg1 = TREE_OPERAND (arg1, 1);
1979 while (arg0 && arg1)
1980 {
1981 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1), 0))
1982 return 0;
1983
1984 arg0 = TREE_CHAIN (arg0);
1985 arg1 = TREE_CHAIN (arg1);
1986 }
1987
1988 /* If we get here and both argument lists are exhausted
1989 then the CALL_EXPRs are equal. */
1990 return ! (arg0 || arg1);
1991
1992 default:
1993 return 0;
1994 }
1995
1996 case 'd':
1997 /* Consider __builtin_sqrt equal to sqrt. */
1998 return TREE_CODE (arg0) == FUNCTION_DECL
1999 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2000 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2001 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1);
2002
2003 default:
2004 return 0;
2005 }
2006 }
2007 \f
2008 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2009 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2010
2011 When in doubt, return 0. */
2012
2013 static int
2014 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2015 {
2016 int unsignedp1, unsignedpo;
2017 tree primarg0, primarg1, primother;
2018 unsigned int correct_width;
2019
2020 if (operand_equal_p (arg0, arg1, 0))
2021 return 1;
2022
2023 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2024 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2025 return 0;
2026
2027 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2028 and see if the inner values are the same. This removes any
2029 signedness comparison, which doesn't matter here. */
2030 primarg0 = arg0, primarg1 = arg1;
2031 STRIP_NOPS (primarg0);
2032 STRIP_NOPS (primarg1);
2033 if (operand_equal_p (primarg0, primarg1, 0))
2034 return 1;
2035
2036 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2037 actual comparison operand, ARG0.
2038
2039 First throw away any conversions to wider types
2040 already present in the operands. */
2041
2042 primarg1 = get_narrower (arg1, &unsignedp1);
2043 primother = get_narrower (other, &unsignedpo);
2044
2045 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2046 if (unsignedp1 == unsignedpo
2047 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2048 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2049 {
2050 tree type = TREE_TYPE (arg0);
2051
2052 /* Make sure shorter operand is extended the right way
2053 to match the longer operand. */
2054 primarg1 = convert ((*lang_hooks.types.signed_or_unsigned_type)
2055 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2056
2057 if (operand_equal_p (arg0, convert (type, primarg1), 0))
2058 return 1;
2059 }
2060
2061 return 0;
2062 }
2063 \f
2064 /* See if ARG is an expression that is either a comparison or is performing
2065 arithmetic on comparisons. The comparisons must only be comparing
2066 two different values, which will be stored in *CVAL1 and *CVAL2; if
2067 they are nonzero it means that some operands have already been found.
2068 No variables may be used anywhere else in the expression except in the
2069 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2070 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2071
2072 If this is true, return 1. Otherwise, return zero. */
2073
2074 static int
2075 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2076 {
2077 enum tree_code code = TREE_CODE (arg);
2078 char class = TREE_CODE_CLASS (code);
2079
2080 /* We can handle some of the 'e' cases here. */
2081 if (class == 'e' && code == TRUTH_NOT_EXPR)
2082 class = '1';
2083 else if (class == 'e'
2084 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2085 || code == COMPOUND_EXPR))
2086 class = '2';
2087
2088 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
2089 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2090 {
2091 /* If we've already found a CVAL1 or CVAL2, this expression is
2092 	 too complex to handle.  */
2093 if (*cval1 || *cval2)
2094 return 0;
2095
2096 class = '1';
2097 *save_p = 1;
2098 }
2099
2100 switch (class)
2101 {
2102 case '1':
2103 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2104
2105 case '2':
2106 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2107 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2108 cval1, cval2, save_p));
2109
2110 case 'c':
2111 return 1;
2112
2113 case 'e':
2114 if (code == COND_EXPR)
2115 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2116 cval1, cval2, save_p)
2117 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2118 cval1, cval2, save_p)
2119 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2120 cval1, cval2, save_p));
2121 return 0;
2122
2123 case '<':
2124 /* First see if we can handle the first operand, then the second. For
2125 the second operand, we know *CVAL1 can't be zero. It must be that
2126 one side of the comparison is each of the values; test for the
2127 case where this isn't true by failing if the two operands
2128 are the same. */
2129
2130 if (operand_equal_p (TREE_OPERAND (arg, 0),
2131 TREE_OPERAND (arg, 1), 0))
2132 return 0;
2133
2134 if (*cval1 == 0)
2135 *cval1 = TREE_OPERAND (arg, 0);
2136 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2137 ;
2138 else if (*cval2 == 0)
2139 *cval2 = TREE_OPERAND (arg, 0);
2140 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2141 ;
2142 else
2143 return 0;
2144
2145 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2146 ;
2147 else if (*cval2 == 0)
2148 *cval2 = TREE_OPERAND (arg, 1);
2149 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2150 ;
2151 else
2152 return 0;
2153
2154 return 1;
2155
2156 default:
2157 return 0;
2158 }
2159 }
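
/* For instance (illustrative): for ARG = (a < b) && (a == b),
   twoval_comparison_p returns 1 with *CVAL1 = a and *CVAL2 = b, since
   both comparisons mention only those two values; for
   (a < b) && (a == c) it returns 0, because the comparisons involve
   three distinct operands a, b and c.  */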
2160 \f
2161 /* ARG is a tree that is known to contain just arithmetic operations and
2162 comparisons. Evaluate the operations in the tree substituting NEW0 for
2163 any occurrence of OLD0 as an operand of a comparison and likewise for
2164 NEW1 and OLD1. */
2165
2166 static tree
2167 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2168 {
2169 tree type = TREE_TYPE (arg);
2170 enum tree_code code = TREE_CODE (arg);
2171 char class = TREE_CODE_CLASS (code);
2172
2173 /* We can handle some of the 'e' cases here. */
2174 if (class == 'e' && code == TRUTH_NOT_EXPR)
2175 class = '1';
2176 else if (class == 'e'
2177 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2178 class = '2';
2179
2180 switch (class)
2181 {
2182 case '1':
2183 return fold (build1 (code, type,
2184 eval_subst (TREE_OPERAND (arg, 0),
2185 old0, new0, old1, new1)));
2186
2187 case '2':
2188 return fold (build (code, type,
2189 eval_subst (TREE_OPERAND (arg, 0),
2190 old0, new0, old1, new1),
2191 eval_subst (TREE_OPERAND (arg, 1),
2192 old0, new0, old1, new1)));
2193
2194 case 'e':
2195 switch (code)
2196 {
2197 case SAVE_EXPR:
2198 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2199
2200 case COMPOUND_EXPR:
2201 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2202
2203 case COND_EXPR:
2204 return fold (build (code, type,
2205 eval_subst (TREE_OPERAND (arg, 0),
2206 old0, new0, old1, new1),
2207 eval_subst (TREE_OPERAND (arg, 1),
2208 old0, new0, old1, new1),
2209 eval_subst (TREE_OPERAND (arg, 2),
2210 old0, new0, old1, new1)));
2211 default:
2212 break;
2213 }
2214 /* Fall through - ??? */
2215
2216 case '<':
2217 {
2218 tree arg0 = TREE_OPERAND (arg, 0);
2219 tree arg1 = TREE_OPERAND (arg, 1);
2220
2221 /* We need to check both for exact equality and tree equality. The
2222 former will be true if the operand has a side-effect. In that
2223 case, we know the operand occurred exactly once. */
2224
2225 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2226 arg0 = new0;
2227 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2228 arg0 = new1;
2229
2230 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2231 arg1 = new0;
2232 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2233 arg1 = new1;
2234
2235 return fold (build (code, type, arg0, arg1));
2236 }
2237
2238 default:
2239 return arg;
2240 }
2241 }
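
/* As an illustration (hypothetical trees): with ARG = (a < b) || (a == b),
   eval_subst (ARG, a, 5, b, 7) rebuilds the expression as
   (5 < 7) || (5 == 7), which fold then reduces to the constant 1.  */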
2242 \f
2243 /* Return a tree for the case when the result of an expression is RESULT
2244 converted to TYPE and OMITTED was previously an operand of the expression
2245 but is now not needed (e.g., we folded OMITTED * 0).
2246
2247 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2248 the conversion of RESULT to TYPE. */
2249
2250 tree
2251 omit_one_operand (tree type, tree result, tree omitted)
2252 {
2253 tree t = convert (type, result);
2254
2255 if (TREE_SIDE_EFFECTS (omitted))
2256 return build (COMPOUND_EXPR, type, omitted, t);
2257
2258 return non_lvalue (t);
2259 }
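
/* For example (illustrative): when folding f () * 0 the result is 0,
   but the call may have side effects, so
     omit_one_operand (type, integer_zero_node, <the CALL_EXPR>)
   yields the COMPOUND_EXPR (f (), 0), evaluating the call and then
   producing 0; for a side-effect-free OMITTED it yields just 0.  */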
2260
2261 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2262
2263 static tree
2264 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2265 {
2266 tree t = convert (type, result);
2267
2268 if (TREE_SIDE_EFFECTS (omitted))
2269 return build (COMPOUND_EXPR, type, omitted, t);
2270
2271 return pedantic_non_lvalue (t);
2272 }
2273 \f
2274 /* Return a simplified tree node for the truth-negation of ARG. This
2275 never alters ARG itself. We assume that ARG is an operation that
2276 returns a truth value (0 or 1). */
2277
2278 tree
2279 invert_truthvalue (tree arg)
2280 {
2281 tree type = TREE_TYPE (arg);
2282 enum tree_code code = TREE_CODE (arg);
2283
2284 if (code == ERROR_MARK)
2285 return arg;
2286
2287 /* If this is a comparison, we can simply invert it, except for
2288 floating-point non-equality comparisons, in which case we just
2289 enclose a TRUTH_NOT_EXPR around what we have. */
2290
2291 if (TREE_CODE_CLASS (code) == '<')
2292 {
2293 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2294 && !flag_unsafe_math_optimizations
2295 && code != NE_EXPR
2296 && code != EQ_EXPR)
2297 return build1 (TRUTH_NOT_EXPR, type, arg);
2298 else
2299 return build (invert_tree_comparison (code), type,
2300 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2301 }
2302
2303 switch (code)
2304 {
2305 case INTEGER_CST:
2306 return convert (type, build_int_2 (integer_zerop (arg), 0));
2307
2308 case TRUTH_AND_EXPR:
2309 return build (TRUTH_OR_EXPR, type,
2310 invert_truthvalue (TREE_OPERAND (arg, 0)),
2311 invert_truthvalue (TREE_OPERAND (arg, 1)));
2312
2313 case TRUTH_OR_EXPR:
2314 return build (TRUTH_AND_EXPR, type,
2315 invert_truthvalue (TREE_OPERAND (arg, 0)),
2316 invert_truthvalue (TREE_OPERAND (arg, 1)));
2317
2318 case TRUTH_XOR_EXPR:
2319 /* Here we can invert either operand. We invert the first operand
2320 unless the second operand is a TRUTH_NOT_EXPR in which case our
2321 result is the XOR of the first operand with the inside of the
2322 negation of the second operand. */
2323
2324 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2325 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2326 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2327 else
2328 return build (TRUTH_XOR_EXPR, type,
2329 invert_truthvalue (TREE_OPERAND (arg, 0)),
2330 TREE_OPERAND (arg, 1));
2331
2332 case TRUTH_ANDIF_EXPR:
2333 return build (TRUTH_ORIF_EXPR, type,
2334 invert_truthvalue (TREE_OPERAND (arg, 0)),
2335 invert_truthvalue (TREE_OPERAND (arg, 1)));
2336
2337 case TRUTH_ORIF_EXPR:
2338 return build (TRUTH_ANDIF_EXPR, type,
2339 invert_truthvalue (TREE_OPERAND (arg, 0)),
2340 invert_truthvalue (TREE_OPERAND (arg, 1)));
2341
2342 case TRUTH_NOT_EXPR:
2343 return TREE_OPERAND (arg, 0);
2344
2345 case COND_EXPR:
2346 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2347 invert_truthvalue (TREE_OPERAND (arg, 1)),
2348 invert_truthvalue (TREE_OPERAND (arg, 2)));
2349
2350 case COMPOUND_EXPR:
2351 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2352 invert_truthvalue (TREE_OPERAND (arg, 1)));
2353
2354 case WITH_RECORD_EXPR:
2355 return build (WITH_RECORD_EXPR, type,
2356 invert_truthvalue (TREE_OPERAND (arg, 0)),
2357 TREE_OPERAND (arg, 1));
2358
2359 case NON_LVALUE_EXPR:
2360 return invert_truthvalue (TREE_OPERAND (arg, 0));
2361
2362 case NOP_EXPR:
2363 case CONVERT_EXPR:
2364 case FLOAT_EXPR:
2365 return build1 (TREE_CODE (arg), type,
2366 invert_truthvalue (TREE_OPERAND (arg, 0)));
2367
2368 case BIT_AND_EXPR:
2369 if (!integer_onep (TREE_OPERAND (arg, 1)))
2370 break;
2371 return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));
2372
2373 case SAVE_EXPR:
2374 return build1 (TRUTH_NOT_EXPR, type, arg);
2375
2376 case CLEANUP_POINT_EXPR:
2377 return build1 (CLEANUP_POINT_EXPR, type,
2378 invert_truthvalue (TREE_OPERAND (arg, 0)));
2379
2380 default:
2381 break;
2382 }
2383 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2384 abort ();
2385 return build1 (TRUTH_NOT_EXPR, type, arg);
2386 }
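
/* For example (illustrative): invert_truthvalue applied to
   a < b && c != 0 produces a >= b || c == 0 via the De Morgan cases
   above, with the comparisons themselves inverted rather than wrapped
   in TRUTH_NOT_EXPRs (floating-point non-equality comparisons
   excepted, as noted above).  */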
2387
2388 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2389 operands are another bit-wise operation with a common input. If so,
2390 distribute the bit operations to save an operation and possibly two if
2391 constants are involved. For example, convert
2392 (A | B) & (A | C) into A | (B & C)
2393 Further simplification will occur if B and C are constants.
2394
2395 If this optimization cannot be done, 0 will be returned. */
2396
2397 static tree
2398 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
2399 {
2400 tree common;
2401 tree left, right;
2402
2403 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2404 || TREE_CODE (arg0) == code
2405 || (TREE_CODE (arg0) != BIT_AND_EXPR
2406 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2407 return 0;
2408
2409 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2410 {
2411 common = TREE_OPERAND (arg0, 0);
2412 left = TREE_OPERAND (arg0, 1);
2413 right = TREE_OPERAND (arg1, 1);
2414 }
2415 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2416 {
2417 common = TREE_OPERAND (arg0, 0);
2418 left = TREE_OPERAND (arg0, 1);
2419 right = TREE_OPERAND (arg1, 0);
2420 }
2421 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2422 {
2423 common = TREE_OPERAND (arg0, 1);
2424 left = TREE_OPERAND (arg0, 0);
2425 right = TREE_OPERAND (arg1, 1);
2426 }
2427 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2428 {
2429 common = TREE_OPERAND (arg0, 1);
2430 left = TREE_OPERAND (arg0, 0);
2431 right = TREE_OPERAND (arg1, 0);
2432 }
2433 else
2434 return 0;
2435
2436 return fold (build (TREE_CODE (arg0), type, common,
2437 fold (build (code, type, left, right))));
2438 }
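
/* Concretely (illustrative): for (x | 3) & (x | 5) this returns
   x | (3 & 5), and the inner fold reduces the constant part, so the
   final result is x | 1 -- one bitwise operation instead of three.  */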
2439 \f
2440 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2441 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
2442
2443 static tree
2444 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
2445 int unsignedp)
2446 {
2447 tree result = build (BIT_FIELD_REF, type, inner,
2448 size_int (bitsize), bitsize_int (bitpos));
2449
2450 TREE_UNSIGNED (result) = unsignedp;
2451
2452 return result;
2453 }
2454
2455 /* Optimize a bit-field compare.
2456
2457 There are two cases: First is a compare against a constant and the
2458 second is a comparison of two items where the fields are at the same
2459 bit position relative to the start of a chunk (byte, halfword, word)
2460 large enough to contain it. In these cases we can avoid the shift
2461 implicit in bitfield extractions.
2462
2463 For constants, we emit a compare of the shifted constant with the
2464 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2465 compared. For two fields at the same position, we do the ANDs with the
2466 similar mask and compare the result of the ANDs.
2467
2468 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2469 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2470 are the left and right operands of the comparison, respectively.
2471
2472 If the optimization described above can be done, we return the resulting
2473 tree. Otherwise we return zero. */
2474
2475 static tree
2476 optimize_bit_field_compare (enum tree_code code, tree compare_type,
2477 tree lhs, tree rhs)
2478 {
2479 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
2480 tree type = TREE_TYPE (lhs);
2481 tree signed_type, unsigned_type;
2482 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2483 enum machine_mode lmode, rmode, nmode;
2484 int lunsignedp, runsignedp;
2485 int lvolatilep = 0, rvolatilep = 0;
2486 tree linner, rinner = NULL_TREE;
2487 tree mask;
2488 tree offset;
2489
2490 /* Get all the information about the extractions being done. If the bit size
2491      is the same as the size of the underlying object, we aren't doing an
2492 extraction at all and so can do nothing. We also don't want to
2493 do anything if the inner expression is a PLACEHOLDER_EXPR since we
2494 then will no longer be able to replace it. */
2495 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2496 &lunsignedp, &lvolatilep);
2497 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2498 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
2499 return 0;
2500
2501 if (!const_p)
2502 {
2503 /* If this is not a constant, we can only do something if bit positions,
2504 sizes, and signedness are the same. */
2505 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
2506 &runsignedp, &rvolatilep);
2507
2508 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2509 || lunsignedp != runsignedp || offset != 0
2510 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
2511 return 0;
2512 }
2513
2514 /* See if we can find a mode to refer to this field. We should be able to,
2515 but fail if we can't. */
2516 nmode = get_best_mode (lbitsize, lbitpos,
2517 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
2518 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
2519 TYPE_ALIGN (TREE_TYPE (rinner))),
2520 word_mode, lvolatilep || rvolatilep);
2521 if (nmode == VOIDmode)
2522 return 0;
2523
2524 /* Set signed and unsigned types of the precision of this mode for the
2525 shifts below. */
2526 signed_type = (*lang_hooks.types.type_for_mode) (nmode, 0);
2527 unsigned_type = (*lang_hooks.types.type_for_mode) (nmode, 1);
2528
2529 /* Compute the bit position and size for the new reference and our offset
2530 within it. If the new reference is the same size as the original, we
2531 won't optimize anything, so return zero. */
2532 nbitsize = GET_MODE_BITSIZE (nmode);
2533 nbitpos = lbitpos & ~ (nbitsize - 1);
2534 lbitpos -= nbitpos;
2535 if (nbitsize == lbitsize)
2536 return 0;
2537
2538 if (BYTES_BIG_ENDIAN)
2539 lbitpos = nbitsize - lbitsize - lbitpos;
2540
2541 /* Make the mask to be used against the extracted field. */
2542 mask = build_int_2 (~0, ~0);
2543 TREE_TYPE (mask) = unsigned_type;
2544 force_fit_type (mask, 0);
2545 mask = convert (unsigned_type, mask);
2546 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
2547 mask = const_binop (RSHIFT_EXPR, mask,
2548 size_int (nbitsize - lbitsize - lbitpos), 0);
2549
2550 if (! const_p)
2551     /* If not comparing with a constant, just rework the comparison
2552 and return. */
2553 return build (code, compare_type,
2554 build (BIT_AND_EXPR, unsigned_type,
2555 make_bit_field_ref (linner, unsigned_type,
2556 nbitsize, nbitpos, 1),
2557 mask),
2558 build (BIT_AND_EXPR, unsigned_type,
2559 make_bit_field_ref (rinner, unsigned_type,
2560 nbitsize, nbitpos, 1),
2561 mask));
2562
2563 /* Otherwise, we are handling the constant case. See if the constant is too
2564      big for the field.  Warn and return a tree for 0 (false) if so.  We do
2565 this not only for its own sake, but to avoid having to test for this
2566 error case below. If we didn't, we might generate wrong code.
2567
2568 For unsigned fields, the constant shifted right by the field length should
2569 be all zero. For signed fields, the high-order bits should agree with
2570 the sign bit. */
2571
2572 if (lunsignedp)
2573 {
2574 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2575 convert (unsigned_type, rhs),
2576 size_int (lbitsize), 0)))
2577 {
2578 warning ("comparison is always %d due to width of bit-field",
2579 code == NE_EXPR);
2580 return convert (compare_type,
2581 (code == NE_EXPR
2582 ? integer_one_node : integer_zero_node));
2583 }
2584 }
2585 else
2586 {
2587 tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
2588 size_int (lbitsize - 1), 0);
2589 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2590 {
2591 warning ("comparison is always %d due to width of bit-field",
2592 code == NE_EXPR);
2593 return convert (compare_type,
2594 (code == NE_EXPR
2595 ? integer_one_node : integer_zero_node));
2596 }
2597 }
2598
2599 /* Single-bit compares should always be against zero. */
2600 if (lbitsize == 1 && ! integer_zerop (rhs))
2601 {
2602 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2603 rhs = convert (type, integer_zero_node);
2604 }
2605
2606 /* Make a new bitfield reference, shift the constant over the
2607 appropriate number of bits and mask it with the computed mask
2608 (in case this was a signed field). If we changed it, make a new one. */
2609 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
2610 if (lvolatilep)
2611 {
2612 TREE_SIDE_EFFECTS (lhs) = 1;
2613 TREE_THIS_VOLATILE (lhs) = 1;
2614 }
2615
2616 rhs = fold (const_binop (BIT_AND_EXPR,
2617 const_binop (LSHIFT_EXPR,
2618 convert (unsigned_type, rhs),
2619 size_int (lbitpos), 0),
2620 mask, 0));
2621
2622 return build (code, compare_type,
2623 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
2624 rhs);
2625 }
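
/* A sketch of the constant case (hypothetical layout; the exact mask
   and shift depend on the target's endianness and alignment):

     struct s { unsigned a : 3; unsigned b : 9; } x;

   Testing x.b == 5, with the word containing the field fetched as W,
   becomes roughly

     (W & 0xff8) == (5 << 3)

   i.e. a mask-and-compare on the containing word, avoiding the shift
   that a plain bit-field extraction would need.  */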
2626 \f
2627 /* Subroutine for fold_truthop: decode a field reference.
2628
2629 If EXP is a comparison reference, we return the innermost reference.
2630
2631 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2632 set to the starting bit number.
2633
2634 If the innermost field can be completely contained in a mode-sized
2635 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
2636
2637    *PVOLATILEP is set to 1 if any expression encountered is volatile;
2638 otherwise it is not changed.
2639
2640 *PUNSIGNEDP is set to the signedness of the field.
2641
2642 *PMASK is set to the mask used. This is either contained in a
2643 BIT_AND_EXPR or derived from the width of the field.
2644
2645 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
2646
2647 Return 0 if this is not a component reference or is one that we can't
2648 do anything with. */
2649
2650 static tree
2651 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
2652 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
2653 int *punsignedp, int *pvolatilep,
2654 tree *pmask, tree *pand_mask)
2655 {
2656 tree outer_type = 0;
2657 tree and_mask = 0;
2658 tree mask, inner, offset;
2659 tree unsigned_type;
2660 unsigned int precision;
2661
2662 /* All the optimizations using this function assume integer fields.
2663 There are problems with FP fields since the type_for_size call
2664 below can fail for, e.g., XFmode. */
2665 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
2666 return 0;
2667
2668 /* We are interested in the bare arrangement of bits, so strip everything
2669 that doesn't affect the machine mode. However, record the type of the
2670 outermost expression if it may matter below. */
2671 if (TREE_CODE (exp) == NOP_EXPR
2672 || TREE_CODE (exp) == CONVERT_EXPR
2673 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2674 outer_type = TREE_TYPE (exp);
2675 STRIP_NOPS (exp);
2676
2677 if (TREE_CODE (exp) == BIT_AND_EXPR)
2678 {
2679 and_mask = TREE_OPERAND (exp, 1);
2680 exp = TREE_OPERAND (exp, 0);
2681 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
2682 if (TREE_CODE (and_mask) != INTEGER_CST)
2683 return 0;
2684 }
2685
2686 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
2687 punsignedp, pvolatilep);
2688 if ((inner == exp && and_mask == 0)
2689 || *pbitsize < 0 || offset != 0
2690 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
2691 return 0;
2692
2693 /* If the number of bits in the reference is the same as the bitsize of
2694 the outer type, then the outer type gives the signedness. Otherwise
2695 (in case of a small bitfield) the signedness is unchanged. */
2696 if (outer_type && *pbitsize == tree_low_cst (TYPE_SIZE (outer_type), 1))
2697 *punsignedp = TREE_UNSIGNED (outer_type);
2698
2699 /* Compute the mask to access the bitfield. */
2700 unsigned_type = (*lang_hooks.types.type_for_size) (*pbitsize, 1);
2701 precision = TYPE_PRECISION (unsigned_type);
2702
2703 mask = build_int_2 (~0, ~0);
2704 TREE_TYPE (mask) = unsigned_type;
2705 force_fit_type (mask, 0);
2706 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2707 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2708
2709 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
2710 if (and_mask != 0)
2711 mask = fold (build (BIT_AND_EXPR, unsigned_type,
2712 convert (unsigned_type, and_mask), mask));
2713
2714 *pmask = mask;
2715 *pand_mask = and_mask;
2716 return inner;
2717 }
2718
2719 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
2720 bit positions. */
2721
2722 static int
2723 all_ones_mask_p (tree mask, int size)
2724 {
2725 tree type = TREE_TYPE (mask);
2726 unsigned int precision = TYPE_PRECISION (type);
2727 tree tmask;
2728
2729 tmask = build_int_2 (~0, ~0);
2730 TREE_TYPE (tmask) = (*lang_hooks.types.signed_type) (type);
2731 force_fit_type (tmask, 0);
2732 return
2733 tree_int_cst_equal (mask,
2734 const_binop (RSHIFT_EXPR,
2735 const_binop (LSHIFT_EXPR, tmask,
2736 size_int (precision - size),
2737 0),
2738 size_int (precision - size), 0));
2739 }
2740
2741 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
2742 represents the sign bit of EXP's type. If EXP represents a sign
2743 or zero extension, also test VAL against the unextended type.
2744 The return value is the (sub)expression whose sign bit is VAL,
2745 or NULL_TREE otherwise. */
2746
2747 static tree
2748 sign_bit_p (tree exp, tree val)
2749 {
2750 unsigned HOST_WIDE_INT mask_lo, lo;
2751 HOST_WIDE_INT mask_hi, hi;
2752 int width;
2753 tree t;
2754
2755 /* Tree EXP must have an integral type. */
2756 t = TREE_TYPE (exp);
2757 if (! INTEGRAL_TYPE_P (t))
2758 return NULL_TREE;
2759
2760 /* Tree VAL must be an integer constant. */
2761 if (TREE_CODE (val) != INTEGER_CST
2762 || TREE_CONSTANT_OVERFLOW (val))
2763 return NULL_TREE;
2764
2765 width = TYPE_PRECISION (t);
2766 if (width > HOST_BITS_PER_WIDE_INT)
2767 {
2768 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
2769 lo = 0;
2770
2771 mask_hi = ((unsigned HOST_WIDE_INT) -1
2772 >> (2 * HOST_BITS_PER_WIDE_INT - width));
2773 mask_lo = -1;
2774 }
2775 else
2776 {
2777 hi = 0;
2778 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
2779
2780 mask_hi = 0;
2781 mask_lo = ((unsigned HOST_WIDE_INT) -1
2782 >> (HOST_BITS_PER_WIDE_INT - width));
2783 }
2784
2785 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
2786 treat VAL as if it were unsigned. */
2787 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
2788 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
2789 return exp;
2790
2791 /* Handle extension from a narrower type. */
2792 if (TREE_CODE (exp) == NOP_EXPR
2793 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
2794 return sign_bit_p (TREE_OPERAND (exp, 0), val);
2795
2796 return NULL_TREE;
2797 }
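
/* For example (illustrative): with 32-bit ints,
   sign_bit_p (x, 0x80000000) returns x, since that constant is the
   sign bit of the type; and if EXP is (int) c for a signed char c,
   the narrower constant 0x80 is also accepted via the NOP_EXPR case
   and c itself is returned.  */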
2798
2799 /* Subroutine for fold_truthop: determine if an operand is simple enough
2800 to be evaluated unconditionally. */
2801
2802 static int
2803 simple_operand_p (tree exp)
2804 {
2805 /* Strip any conversions that don't change the machine mode. */
2806 while ((TREE_CODE (exp) == NOP_EXPR
2807 || TREE_CODE (exp) == CONVERT_EXPR)
2808 && (TYPE_MODE (TREE_TYPE (exp))
2809 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
2810 exp = TREE_OPERAND (exp, 0);
2811
2812 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
2813 || (DECL_P (exp)
2814 && ! TREE_ADDRESSABLE (exp)
2815 && ! TREE_THIS_VOLATILE (exp)
2816 && ! DECL_NONLOCAL (exp)
2817 /* Don't regard global variables as simple. They may be
2818 allocated in ways unknown to the compiler (shared memory,
2819 #pragma weak, etc). */
2820 && ! TREE_PUBLIC (exp)
2821 && ! DECL_EXTERNAL (exp)
2822 /* Loading a static variable is unduly expensive, but global
2823 registers aren't expensive. */
2824 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
2825 }
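
/* For example (illustrative): a constant or a local non-volatile,
   non-addressable variable is "simple" and may be evaluated
   unconditionally, while a global variable, a volatile object, or an
   indirection such as *p is not.  */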
2826 \f
2827 /* The following functions are subroutines to fold_range_test and allow it to
2828 try to change a logical combination of comparisons into a range test.
2829
2830 For example, both
2831 X == 2 || X == 3 || X == 4 || X == 5
2832 and
2833 X >= 2 && X <= 5
2834 are converted to
2835 (unsigned) (X - 2) <= 3
2836
2837 We describe each set of comparisons as being either inside or outside
2838 a range, using a variable named like IN_P, and then describe the
2839 range with a lower and upper bound. If one of the bounds is omitted,
2840 it represents either the highest or lowest value of the type.
2841
2842 In the comments below, we represent a range by two numbers in brackets
2843 preceded by a "+" to designate being inside that range, or a "-" to
2844 designate being outside that range, so the condition can be inverted by
2845 flipping the prefix. An omitted bound is represented by a "-". For
2846 example, "- [-, 10]" means being outside the range starting at the lowest
2847 possible value and ending at 10, in other words, being greater than 10.
2848 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
2849 always false.
2850
2851 We set up things so that the missing bounds are handled in a consistent
2852 manner so neither a missing bound nor "true" and "false" need to be
2853 handled using a special case. */
2854
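/* A worked example in this notation (illustrative): for
   X == 2 || X == 3, each side is "+ [c, c]"; fold_range_test first
   inverts both sides for the ||, merge_ranges combines the adjacent
   "- [2, 2]" and "- [3, 3]" into "- [2, 3]", and the final inversion
   yields "+ [2, 3]", i.e. the single test (unsigned) (X - 2) <= 1.  */
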
2855 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
2856 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
2857 and UPPER1_P are nonzero if the respective argument is an upper bound
2858 and zero for a lower. TYPE, if nonzero, is the type of the result; it
2859 must be specified for a comparison. ARG1 will be converted to ARG0's
2860 type if both are specified. */
2861
2862 static tree
2863 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
2864 tree arg1, int upper1_p)
2865 {
2866 tree tem;
2867 int result;
2868 int sgn0, sgn1;
2869
2870 /* If neither arg represents infinity, do the normal operation.
2871 Else, if not a comparison, return infinity. Else handle the special
2872 comparison rules. Note that most of the cases below won't occur, but
2873 are handled for consistency. */
2874
2875 if (arg0 != 0 && arg1 != 0)
2876 {
2877 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
2878 arg0, convert (TREE_TYPE (arg0), arg1)));
2879 STRIP_NOPS (tem);
2880 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
2881 }
2882
2883 if (TREE_CODE_CLASS (code) != '<')
2884 return 0;
2885
2886 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
2887 for neither. In real maths, we cannot assume open ended ranges are
2888 the same. But, this is computer arithmetic, where numbers are finite.
2889      We can therefore stand in for any unbounded bound with a value Z
2890      greater than any representable number, which permits us to treat
2891      unbounded ranges as equal.  */
2892 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
2893 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
2894 switch (code)
2895 {
2896 case EQ_EXPR:
2897 result = sgn0 == sgn1;
2898 break;
2899 case NE_EXPR:
2900 result = sgn0 != sgn1;
2901 break;
2902 case LT_EXPR:
2903 result = sgn0 < sgn1;
2904 break;
2905 case LE_EXPR:
2906 result = sgn0 <= sgn1;
2907 break;
2908 case GT_EXPR:
2909 result = sgn0 > sgn1;
2910 break;
2911 case GE_EXPR:
2912 result = sgn0 >= sgn1;
2913 break;
2914 default:
2915 abort ();
2916 }
2917
2918 return convert (type, result ? integer_one_node : integer_zero_node);
2919 }
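
/* For instance (illustrative, writing the INTEGER_CST operand as 5 for
   brevity): with ARG1 omitted as an upper bound,
     range_binop (LT_EXPR, type, 5, 0, 0, 1)
   compares 5 against "+infinity": SGN0 is 0 and SGN1 is 1, so the
   result is true.  */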
2920 \f
2921 /* Given EXP, a logical expression, set the range it is testing into
2922 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
2923 actually being tested. *PLOW and *PHIGH will be made of the same type
2924 as the returned expression. If EXP is not a comparison, we will most
2925 likely not be returning a useful value and range. */
2926
2927 static tree
2928 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
2929 {
2930 enum tree_code code;
2931 tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
2932 tree orig_type = NULL_TREE;
2933 int in_p, n_in_p;
2934 tree low, high, n_low, n_high;
2935
2936 /* Start with simply saying "EXP != 0" and then look at the code of EXP
2937 and see if we can refine the range. Some of the cases below may not
2938 happen, but it doesn't seem worth worrying about this. We "continue"
2939 the outer loop when we've changed something; otherwise we "break"
2940 the switch, which will "break" the while. */
2941
2942 in_p = 0, low = high = convert (TREE_TYPE (exp), integer_zero_node);
2943
2944 while (1)
2945 {
2946 code = TREE_CODE (exp);
2947
2948 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
2949 {
2950 if (first_rtl_op (code) > 0)
2951 arg0 = TREE_OPERAND (exp, 0);
2952 if (TREE_CODE_CLASS (code) == '<'
2953 || TREE_CODE_CLASS (code) == '1'
2954 || TREE_CODE_CLASS (code) == '2')
2955 type = TREE_TYPE (arg0);
2956 if (TREE_CODE_CLASS (code) == '2'
2957 || TREE_CODE_CLASS (code) == '<'
2958 || (TREE_CODE_CLASS (code) == 'e'
2959 && TREE_CODE_LENGTH (code) > 1))
2960 arg1 = TREE_OPERAND (exp, 1);
2961 }
2962
2963 /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
2964 lose a cast by accident. */
2965 if (type != NULL_TREE && orig_type == NULL_TREE)
2966 orig_type = type;
2967
2968 switch (code)
2969 {
2970 case TRUTH_NOT_EXPR:
2971 in_p = ! in_p, exp = arg0;
2972 continue;
2973
2974 case EQ_EXPR: case NE_EXPR:
2975 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
2976 /* We can only do something if the range is testing for zero
2977 and if the second operand is an integer constant. Note that
2978 saying something is "in" the range we make is done by
2979 complementing IN_P since it will set in the initial case of
2980 being not equal to zero; "out" is leaving it alone. */
2981 if (low == 0 || high == 0
2982 || ! integer_zerop (low) || ! integer_zerop (high)
2983 || TREE_CODE (arg1) != INTEGER_CST)
2984 break;
2985
2986 switch (code)
2987 {
2988 case NE_EXPR: /* - [c, c] */
2989 low = high = arg1;
2990 break;
2991 case EQ_EXPR: /* + [c, c] */
2992 in_p = ! in_p, low = high = arg1;
2993 break;
2994 case GT_EXPR: /* - [-, c] */
2995 low = 0, high = arg1;
2996 break;
2997 case GE_EXPR: /* + [c, -] */
2998 in_p = ! in_p, low = arg1, high = 0;
2999 break;
3000 case LT_EXPR: /* - [c, -] */
3001 low = arg1, high = 0;
3002 break;
3003 case LE_EXPR: /* + [-, c] */
3004 in_p = ! in_p, low = 0, high = arg1;
3005 break;
3006 default:
3007 abort ();
3008 }
3009
3010 exp = arg0;
3011
3012 /* If this is an unsigned comparison, we also know that EXP is
3013 greater than or equal to zero. We base the range tests we make
3014 on that fact, so we record it here so we can parse existing
3015 range tests. */
3016 if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
3017 {
3018 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
3019 1, convert (type, integer_zero_node),
3020 NULL_TREE))
3021 break;
3022
3023 in_p = n_in_p, low = n_low, high = n_high;
3024
3025 /* If the high bound is missing, but we
3026 have a low bound, reverse the range so
3027 it goes from zero to the low bound minus 1. */
3028 if (high == 0 && low)
3029 {
3030 in_p = ! in_p;
3031 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3032 integer_one_node, 0);
3033 low = convert (type, integer_zero_node);
3034 }
3035 }
3036 continue;
3037
3038 case NEGATE_EXPR:
3039 /* (-x) IN [a,b] -> x in [-b, -a] */
3040 n_low = range_binop (MINUS_EXPR, type,
3041 convert (type, integer_zero_node), 0, high, 1);
3042 n_high = range_binop (MINUS_EXPR, type,
3043 convert (type, integer_zero_node), 0, low, 0);
3044 low = n_low, high = n_high;
3045 exp = arg0;
3046 continue;
3047
3048 case BIT_NOT_EXPR:
3049 /* ~ X -> -X - 1 */
3050 exp = build (MINUS_EXPR, type, negate_expr (arg0),
3051 convert (type, integer_one_node));
3052 continue;
3053
3054 case PLUS_EXPR: case MINUS_EXPR:
3055 if (TREE_CODE (arg1) != INTEGER_CST)
3056 break;
3057
3058 /* If EXP is signed, any overflow in the computation is undefined,
3059 so we don't worry about it so long as our computations on
3060 the bounds don't overflow. For unsigned, overflow is defined
3061 and this is exactly the right thing. */
3062 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3063 type, low, 0, arg1, 0);
3064 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3065 type, high, 1, arg1, 0);
3066 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3067 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3068 break;
3069
3070 /* Check for an unsigned range which has wrapped around the maximum
3071 value thus making n_high < n_low, and normalize it. */
3072 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3073 {
3074 low = range_binop (PLUS_EXPR, type, n_high, 0,
3075 integer_one_node, 0);
3076 high = range_binop (MINUS_EXPR, type, n_low, 0,
3077 integer_one_node, 0);
3078
3079 /* If the range is of the form +/- [ x+1, x ], we won't
3080 be able to normalize it. But then, it represents the
3081 whole range or the empty set, so make it
3082 +/- [ -, - ]. */
3083 if (tree_int_cst_equal (n_low, low)
3084 && tree_int_cst_equal (n_high, high))
3085 low = high = 0;
3086 else
3087 in_p = ! in_p;
3088 }
3089 else
3090 low = n_low, high = n_high;
3091
3092 exp = arg0;
3093 continue;
3094
3095 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3096 if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
3097 break;
3098
3099 if (! INTEGRAL_TYPE_P (type)
3100 || (low != 0 && ! int_fits_type_p (low, type))
3101 || (high != 0 && ! int_fits_type_p (high, type)))
3102 break;
3103
3104 n_low = low, n_high = high;
3105
3106 if (n_low != 0)
3107 n_low = convert (type, n_low);
3108
3109 if (n_high != 0)
3110 n_high = convert (type, n_high);
3111
3112 /* If we're converting from an unsigned to a signed type,
3113 we will be doing the comparison as unsigned. The tests above
3114 have already verified that LOW and HIGH are both positive.
3115
3116 So we have to make sure that the original unsigned value will
3117 be interpreted as positive. */
3118 if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
3119 {
3120 tree equiv_type = (*lang_hooks.types.type_for_mode)
3121 (TYPE_MODE (type), 1);
3122 tree high_positive;
3123
3124 /* A range without an upper bound is, naturally, unbounded.
3125 Since convert would have cropped a very large value, use
3126 the max value for the destination type. */
3127 high_positive
3128 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3129 : TYPE_MAX_VALUE (type);
3130
3131 if (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (exp)))
3132 high_positive = fold (build (RSHIFT_EXPR, type,
3133 convert (type, high_positive),
3134 convert (type, integer_one_node)));
3135
3136 /* If the low bound is specified, "and" the range with the
3137 range for which the original unsigned value will be
3138 positive. */
3139 if (low != 0)
3140 {
3141 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3142 1, n_low, n_high,
3143 1, convert (type, integer_zero_node),
3144 high_positive))
3145 break;
3146
3147 in_p = (n_in_p == in_p);
3148 }
3149 else
3150 {
3151 /* Otherwise, "or" the range with the range of the input
3152 that will be interpreted as negative. */
3153 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3154 0, n_low, n_high,
3155 1, convert (type, integer_zero_node),
3156 high_positive))
3157 break;
3158
3159 in_p = (in_p != n_in_p);
3160 }
3161 }
3162
3163 exp = arg0;
3164 low = n_low, high = n_high;
3165 continue;
3166
3167 default:
3168 break;
3169 }
3170
3171 break;
3172 }
3173
3174 /* If EXP is a constant, we can evaluate whether this is true or false. */
3175 if (TREE_CODE (exp) == INTEGER_CST)
3176 {
3177 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3178 exp, 0, low, 0))
3179 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3180 exp, 1, high, 1)));
3181 low = high = 0;
3182 exp = 0;
3183 }
3184
3185 *pin_p = in_p, *plow = low, *phigh = high;
3186 return exp;
3187 }
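
/* For example (illustrative): for EXP = (x + 1 <= 3) with signed
   integer x, the LE_EXPR case gives "+ [-, 3]" for x + 1, and the
   PLUS_EXPR case then adjusts the bound, so make_range returns x with
   *PIN_P = 1, *PLOW missing and *PHIGH = 2, i.e. x in "+ [-, 2]".  */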
3188 \f
3189 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3190 type, TYPE, return an expression to test if EXP is in (or out of, depending
3191 on IN_P) the range. */
3192
3193 static tree
3194 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3195 {
3196 tree etype = TREE_TYPE (exp);
3197 tree value;
3198
3199 if (! in_p
3200 && (0 != (value = build_range_check (type, exp, 1, low, high))))
3201 return invert_truthvalue (value);
3202
3203 if (low == 0 && high == 0)
3204 return convert (type, integer_one_node);
3205
3206 if (low == 0)
3207 return fold (build (LE_EXPR, type, exp, high));
3208
3209 if (high == 0)
3210 return fold (build (GE_EXPR, type, exp, low));
3211
3212 if (operand_equal_p (low, high, 0))
3213 return fold (build (EQ_EXPR, type, exp, low));
3214
3215 if (integer_zerop (low))
3216 {
3217 if (! TREE_UNSIGNED (etype))
3218 {
3219 etype = (*lang_hooks.types.unsigned_type) (etype);
3220 high = convert (etype, high);
3221 exp = convert (etype, exp);
3222 }
3223 return build_range_check (type, exp, 1, 0, high);
3224 }
3225
3226 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3227 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3228 {
3229 unsigned HOST_WIDE_INT lo;
3230 HOST_WIDE_INT hi;
3231 int prec;
3232
3233 prec = TYPE_PRECISION (etype);
3234 if (prec <= HOST_BITS_PER_WIDE_INT)
3235 {
3236 hi = 0;
3237 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3238 }
3239 else
3240 {
3241 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3242 lo = (unsigned HOST_WIDE_INT) -1;
3243 }
3244
3245 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3246 {
3247 if (TREE_UNSIGNED (etype))
3248 {
3249 etype = (*lang_hooks.types.signed_type) (etype);
3250 exp = convert (etype, exp);
3251 }
3252 return fold (build (GT_EXPR, type, exp,
3253 convert (etype, integer_zero_node)));
3254 }
3255 }
3256
3257 if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3258 && ! TREE_OVERFLOW (value))
3259 return build_range_check (type,
3260 fold (build (MINUS_EXPR, etype, exp, low)),
3261 1, convert (etype, integer_zero_node), value);
3262
3263 return 0;
3264 }
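
/* For example (illustrative): build_range_check (type, X, 1, 2, 5)
   for integer X subtracts the low bound and recurses, producing
   (unsigned) (X - 2) <= 3; the (c >= 1 && c <= 127) case above shows
   the special signed form (signed char) c > 0.  */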
3265 \f
3266 /* Given two ranges, see if we can merge them into one. Return 1 if we
3267 can, 0 if we can't. Set the output range into the specified parameters. */
3268
3269 static int
3270 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3271 tree high0, int in1_p, tree low1, tree high1)
3272 {
3273 int no_overlap;
3274 int subset;
3275 int temp;
3276 tree tem;
3277 int in_p;
3278 tree low, high;
3279 int lowequal = ((low0 == 0 && low1 == 0)
3280 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3281 low0, 0, low1, 0)));
3282 int highequal = ((high0 == 0 && high1 == 0)
3283 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3284 high0, 1, high1, 1)));
3285
3286 /* Make range 0 be the range that starts first, or ends last if they
3287 start at the same value. Swap them if it isn't. */
3288 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3289 low0, 0, low1, 0))
3290 || (lowequal
3291 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3292 high1, 1, high0, 1))))
3293 {
3294 temp = in0_p, in0_p = in1_p, in1_p = temp;
3295 tem = low0, low0 = low1, low1 = tem;
3296 tem = high0, high0 = high1, high1 = tem;
3297 }
3298
3299 /* Now flag two cases, whether the ranges are disjoint or whether the
3300 second range is totally subsumed in the first. Note that the tests
3301 below are simplified by the ones above. */
3302 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3303 high0, 1, low1, 0));
3304 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3305 high1, 1, high0, 1));
3306
3307 /* We now have four cases, depending on whether we are including or
3308 excluding the two ranges. */
3309 if (in0_p && in1_p)
3310 {
3311 /* If they don't overlap, the result is false. If the second range
3312 is a subset it is the result. Otherwise, the range is from the start
3313 of the second to the end of the first. */
3314 if (no_overlap)
3315 in_p = 0, low = high = 0;
3316 else if (subset)
3317 in_p = 1, low = low1, high = high1;
3318 else
3319 in_p = 1, low = low1, high = high0;
3320 }
3321
3322 else if (in0_p && ! in1_p)
3323 {
3324 /* If they don't overlap, the result is the first range. If they are
3325 equal, the result is false. If the second range is a subset of the
3326 first, and the ranges begin at the same place, we go from just after
3327 the end of the first range to the end of the second. If the second
3328 range is not a subset of the first, or if it is a subset and both
3329 ranges end at the same place, the range starts at the start of the
3330 first range and ends just before the second range.
3331 Otherwise, we can't describe this as a single range. */
3332 if (no_overlap)
3333 in_p = 1, low = low0, high = high0;
3334 else if (lowequal && highequal)
3335 in_p = 0, low = high = 0;
3336 else if (subset && lowequal)
3337 {
3338 in_p = 1, high = high0;
3339 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3340 integer_one_node, 0);
3341 }
3342 else if (! subset || highequal)
3343 {
3344 in_p = 1, low = low0;
3345 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3346 integer_one_node, 0);
3347 }
3348 else
3349 return 0;
3350 }
3351
3352 else if (! in0_p && in1_p)
3353 {
3354 /* If they don't overlap, the result is the second range. If the second
3355 is a subset of the first, the result is false. Otherwise,
3356 the range starts just after the first range and ends at the
3357 end of the second. */
3358 if (no_overlap)
3359 in_p = 1, low = low1, high = high1;
3360 else if (subset || highequal)
3361 in_p = 0, low = high = 0;
3362 else
3363 {
3364 in_p = 1, high = high1;
3365 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3366 integer_one_node, 0);
3367 }
3368 }
3369
3370 else
3371 {
3372 /* The case where we are excluding both ranges. Here the complex case
3373 is if they don't overlap. In that case, the only time we have a
3374 range is if they are adjacent. If the second is a subset of the
3375 first, the result is the first. Otherwise, the range to exclude
3376 starts at the beginning of the first range and ends at the end of the
3377 second. */
3378 if (no_overlap)
3379 {
3380 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3381 range_binop (PLUS_EXPR, NULL_TREE,
3382 high0, 1,
3383 integer_one_node, 1),
3384 1, low1, 0)))
3385 in_p = 0, low = low0, high = high1;
3386 else
3387 return 0;
3388 }
3389 else if (subset)
3390 in_p = 0, low = low0, high = high0;
3391 else
3392 in_p = 0, low = low0, high = high1;
3393 }
3394
3395 *pin_p = in_p, *plow = low, *phigh = high;
3396 return 1;
3397 }
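
/* For example (illustrative): merging "+ [2, 10]" with "+ [5, 20]"
   (IN0_P and IN1_P both nonzero) gives the intersection "+ [5, 10]";
   merging "- [2, 2]" with "- [3, 3]" gives "- [2, 3]", since the two
   excluded ranges are adjacent.  Combinations that cannot be described
   by a single range make this return 0.  */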
3398 \f
3399 #ifndef RANGE_TEST_NON_SHORT_CIRCUIT
3400 #define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
3401 #endif
3402
3403 /* EXP is some logical combination of boolean tests. See if we can
3404 merge it into some range test. Return the new tree if so. */
3405
3406 static tree
3407 fold_range_test (tree exp)
3408 {
3409 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3410 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3411 int in0_p, in1_p, in_p;
3412 tree low0, low1, low, high0, high1, high;
3413 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3414 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3415 tree tem;
3416
3417 /* If this is an OR operation, invert both sides; we will invert
3418 again at the end. */
3419 if (or_op)
3420 in0_p = ! in0_p, in1_p = ! in1_p;
3421
3422 /* If both expressions are the same, if we can merge the ranges, and we
3423 can build the range test, return it or it inverted. If one of the
3424 ranges is always true or always false, consider it to be the same
3425 expression as the other. */
3426 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3427 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3428 in1_p, low1, high1)
3429 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3430 lhs != 0 ? lhs
3431 : rhs != 0 ? rhs : integer_zero_node,
3432 in_p, low, high))))
3433 return or_op ? invert_truthvalue (tem) : tem;
3434
3435 /* On machines where the branch cost is expensive, if this is a
3436 short-circuited branch and the underlying object on both sides
3437 is the same, make a non-short-circuit operation. */
3438 else if (RANGE_TEST_NON_SHORT_CIRCUIT
3439 && lhs != 0 && rhs != 0
3440 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3441 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3442 && operand_equal_p (lhs, rhs, 0))
3443 {
3444 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3445 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
3446 which cases we can't do this. */
3447 if (simple_operand_p (lhs))
3448 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3449 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3450 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3451 TREE_OPERAND (exp, 1));
3452
3453 else if ((*lang_hooks.decls.global_bindings_p) () == 0
3454 && ! CONTAINS_PLACEHOLDER_P (lhs))
3455 {
3456 tree common = save_expr (lhs);
3457
3458 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3459 or_op ? ! in0_p : in0_p,
3460 low0, high0))
3461 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3462 or_op ? ! in1_p : in1_p,
3463 low1, high1))))
3464 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3465 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3466 TREE_TYPE (exp), lhs, rhs);
3467 }
3468 }
3469
3470 return 0;
3471 }
3472 \f
3473 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3474 bit value. Arrange things so the extra bits will be set to zero if and
3475    only if C is sign-extended to its full width.  If MASK is nonzero,
3476 it is an INTEGER_CST that should be AND'ed with the extra bits. */
3477
3478 static tree
3479 unextend (tree c, int p, int unsignedp, tree mask)
3480 {
3481 tree type = TREE_TYPE (c);
3482 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3483 tree temp;
3484
3485 if (p == modesize || unsignedp)
3486 return c;
3487
3488 /* We work by getting just the sign bit into the low-order bit, then
3489 into the high-order bit, then sign-extend. We then XOR that value
3490 with C. */
3491 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3492 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3493
3494 /* We must use a signed type in order to get an arithmetic right shift.
3495 However, we must also avoid introducing accidental overflows, so that
3496 a subsequent call to integer_zerop will work. Hence we must
3497 do the type conversion here. At this point, the constant is either
3498 zero or one, and the conversion to a signed type can never overflow.
3499 We could get an overflow if this conversion is done anywhere else. */
3500 if (TREE_UNSIGNED (type))
3501 temp = convert ((*lang_hooks.types.signed_type) (type), temp);
3502
3503 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3504 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3505 if (mask != 0)
3506 temp = const_binop (BIT_AND_EXPR, temp, convert (TREE_TYPE (c), mask), 0);
3507 /* If necessary, convert the type back to match the type of C. */
3508 if (TREE_UNSIGNED (type))
3509 temp = convert (type, temp);
3510
3511 return convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
3512 }
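
/* A worked example (illustrative, MODESIZE == 32, P == 4): the 4-bit
   value 0b1010 has its sign bit set, so TEMP becomes 0xfffffff0, ones
   in all the "extra" bits.  XOR-ing with C then yields 0x0000000a when
   C arrived sign-extended (C == 0xfffffffa), but 0xfffffffa when it
   did not (C == 0x0000000a) -- exactly the property described above.  */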
3513 \f
3514 /* Find ways of folding logical expressions of LHS and RHS:
3515 Try to merge two comparisons to the same innermost item.
3516 Look for range tests like "ch >= '0' && ch <= '9'".
3517 Look for combinations of simple terms on machines with expensive branches
3518 and evaluate the RHS unconditionally.
3519
3520 For example, if we have p->a == 2 && p->b == 4 and we can make an
3521 object large enough to span both A and B, we can do this with a comparison
3522 against the object ANDed with the a mask.
3523
3524 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3525 operations to do this with one comparison.
3526
3527    We check for both normal comparisons and the BIT_AND_EXPRs made by this
3528    function and the one above.
3529
3530 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3531 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3532
3533 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
3534 two operands.
3535
3536 We return the simplified tree or 0 if no optimization is possible. */
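/* As an illustrative sketch (the exact masks and shifts depend on the
   target's endianness and alignment): given

     struct s { unsigned a : 4; unsigned b : 4; } *p;
     ... p->a == 2 && p->b == 7 ...

   both fields fit in one byte, so the pair of tests can become a single
   comparison of that byte against the merged constant, conceptually
   *(unsigned char *) p == ((7 << 4) | 2) on a little-endian target.  */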
3537
3538 static tree
3539 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
3540 {
3541 /* If this is the "or" of two comparisons, we can do something if
3542 the comparisons are NE_EXPR. If this is the "and", we can do something
3543 if the comparisons are EQ_EXPR. I.e.,
3544 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3545
3546 WANTED_CODE is this operation code. For single bit fields, we can
3547 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3548 comparison for one-bit fields. */
3549
3550 enum tree_code wanted_code;
3551 enum tree_code lcode, rcode;
3552 tree ll_arg, lr_arg, rl_arg, rr_arg;
3553 tree ll_inner, lr_inner, rl_inner, rr_inner;
3554 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3555 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3556 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3557 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3558 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3559 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3560 enum machine_mode lnmode, rnmode;
3561 tree ll_mask, lr_mask, rl_mask, rr_mask;
3562 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3563 tree l_const, r_const;
3564 tree lntype, rntype, result;
3565 int first_bit, end_bit;
3566 int volatilep;
3567
3568 /* Start by getting the comparison codes. Fail if anything is volatile.
3569 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3570 it were surrounded with a NE_EXPR. */
3571
3572 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3573 return 0;
3574
3575 lcode = TREE_CODE (lhs);
3576 rcode = TREE_CODE (rhs);
3577
3578 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3579 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3580
3581 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3582 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3583
3584 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3585 return 0;
3586
3587 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3588 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3589
3590 ll_arg = TREE_OPERAND (lhs, 0);
3591 lr_arg = TREE_OPERAND (lhs, 1);
3592 rl_arg = TREE_OPERAND (rhs, 0);
3593 rr_arg = TREE_OPERAND (rhs, 1);
3594
3595 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
3596 if (simple_operand_p (ll_arg)
3597 && simple_operand_p (lr_arg)
3598 && !FLOAT_TYPE_P (TREE_TYPE (ll_arg)))
3599 {
3600 int compcode;
3601
3602 if (operand_equal_p (ll_arg, rl_arg, 0)
3603 && operand_equal_p (lr_arg, rr_arg, 0))
3604 {
3605 int lcompcode, rcompcode;
3606
3607 lcompcode = comparison_to_compcode (lcode);
3608 rcompcode = comparison_to_compcode (rcode);
3609 compcode = (code == TRUTH_AND_EXPR)
3610 ? lcompcode & rcompcode
3611 : lcompcode | rcompcode;
3612 }
3613 else if (operand_equal_p (ll_arg, rr_arg, 0)
3614 && operand_equal_p (lr_arg, rl_arg, 0))
3615 {
3616 int lcompcode, rcompcode;
3617
3618 rcode = swap_tree_comparison (rcode);
3619 lcompcode = comparison_to_compcode (lcode);
3620 rcompcode = comparison_to_compcode (rcode);
3621 compcode = (code == TRUTH_AND_EXPR)
3622 ? lcompcode & rcompcode
3623 : lcompcode | rcompcode;
3624 }
3625 else
3626 compcode = -1;
3627
3628 if (compcode == COMPCODE_TRUE)
3629 return convert (truth_type, integer_one_node);
3630 else if (compcode == COMPCODE_FALSE)
3631 return convert (truth_type, integer_zero_node);
3632 else if (compcode != -1)
3633 return build (compcode_to_comparison (compcode),
3634 truth_type, ll_arg, lr_arg);
3635 }
3636
3637 /* If the RHS can be evaluated unconditionally and its operands are
3638 simple, it wins to evaluate the RHS unconditionally on machines
3639 with expensive branches. In this case, this isn't a comparison
3640 that can be merged. Avoid doing this if the RHS is a floating-point
3641 comparison since those can trap. */
3642
3643 if (BRANCH_COST >= 2
3644 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
3645 && simple_operand_p (rl_arg)
3646 && simple_operand_p (rr_arg))
3647 {
3648 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
3649 if (code == TRUTH_OR_EXPR
3650 && lcode == NE_EXPR && integer_zerop (lr_arg)
3651 && rcode == NE_EXPR && integer_zerop (rr_arg)
3652 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3653 return build (NE_EXPR, truth_type,
3654 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3655 ll_arg, rl_arg),
3656 integer_zero_node);
3657
3658 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
3659 if (code == TRUTH_AND_EXPR
3660 && lcode == EQ_EXPR && integer_zerop (lr_arg)
3661 && rcode == EQ_EXPR && integer_zerop (rr_arg)
3662 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3663 return build (EQ_EXPR, truth_type,
3664 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3665 ll_arg, rl_arg),
3666 integer_zero_node);
3667
3668 return build (code, truth_type, lhs, rhs);
3669 }
3670
3671 /* See if the comparisons can be merged. Then get all the parameters for
3672 each side. */
3673
3674 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
3675 || (rcode != EQ_EXPR && rcode != NE_EXPR))
3676 return 0;
3677
3678 volatilep = 0;
3679 ll_inner = decode_field_reference (ll_arg,
3680 &ll_bitsize, &ll_bitpos, &ll_mode,
3681 &ll_unsignedp, &volatilep, &ll_mask,
3682 &ll_and_mask);
3683 lr_inner = decode_field_reference (lr_arg,
3684 &lr_bitsize, &lr_bitpos, &lr_mode,
3685 &lr_unsignedp, &volatilep, &lr_mask,
3686 &lr_and_mask);
3687 rl_inner = decode_field_reference (rl_arg,
3688 &rl_bitsize, &rl_bitpos, &rl_mode,
3689 &rl_unsignedp, &volatilep, &rl_mask,
3690 &rl_and_mask);
3691 rr_inner = decode_field_reference (rr_arg,
3692 &rr_bitsize, &rr_bitpos, &rr_mode,
3693 &rr_unsignedp, &volatilep, &rr_mask,
3694 &rr_and_mask);
3695
3696 /* The inner operation on the lhs of each comparison must be the same
3697 if we are to be able to do anything.
3698 Then see if we have constants. If not, the same must be true for
3699 the rhs's. */
3700 if (volatilep || ll_inner == 0 || rl_inner == 0
3701 || ! operand_equal_p (ll_inner, rl_inner, 0))
3702 return 0;
3703
3704 if (TREE_CODE (lr_arg) == INTEGER_CST
3705 && TREE_CODE (rr_arg) == INTEGER_CST)
3706 l_const = lr_arg, r_const = rr_arg;
3707 else if (lr_inner == 0 || rr_inner == 0
3708 || ! operand_equal_p (lr_inner, rr_inner, 0))
3709 return 0;
3710 else
3711 l_const = r_const = 0;
3712
3713 /* If either comparison code is not correct for our logical operation,
3714 fail. However, we can convert a one-bit comparison against zero into
3715 the opposite comparison against that bit being set in the field. */
3716
3717 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
3718 if (lcode != wanted_code)
3719 {
3720 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
3721 {
3722 /* Make the left operand unsigned, since we are only interested
3723 in the value of one bit. Otherwise we are doing the wrong
3724 thing below. */
3725 ll_unsignedp = 1;
3726 l_const = ll_mask;
3727 }
3728 else
3729 return 0;
3730 }
3731
3732 /* This is analogous to the code for l_const above. */
3733 if (rcode != wanted_code)
3734 {
3735 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
3736 {
3737 rl_unsignedp = 1;
3738 r_const = rl_mask;
3739 }
3740 else
3741 return 0;
3742 }
3743
3744 /* After this point all optimizations will generate bit-field
3745 references, which we might not want. */
3746 if (! (*lang_hooks.can_use_bit_fields_p) ())
3747 return 0;
3748
3749 /* See if we can find a mode that contains both fields being compared on
3750 the left. If we can't, fail. Otherwise, update all constants and masks
3751 to be relative to a field of that size. */
3752 first_bit = MIN (ll_bitpos, rl_bitpos);
3753 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
3754 lnmode = get_best_mode (end_bit - first_bit, first_bit,
3755 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
3756 volatilep);
3757 if (lnmode == VOIDmode)
3758 return 0;
3759
3760 lnbitsize = GET_MODE_BITSIZE (lnmode);
3761 lnbitpos = first_bit & ~ (lnbitsize - 1);
3762 lntype = (*lang_hooks.types.type_for_size) (lnbitsize, 1);
3763 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
3764
3765 if (BYTES_BIG_ENDIAN)
3766 {
3767 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
3768 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
3769 }
3770
3771 ll_mask = const_binop (LSHIFT_EXPR, convert (lntype, ll_mask),
3772 size_int (xll_bitpos), 0);
3773 rl_mask = const_binop (LSHIFT_EXPR, convert (lntype, rl_mask),
3774 size_int (xrl_bitpos), 0);
3775
3776 if (l_const)
3777 {
3778 l_const = convert (lntype, l_const);
3779 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
3780 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
3781 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
3782 fold (build1 (BIT_NOT_EXPR,
3783 lntype, ll_mask)),
3784 0)))
3785 {
3786 warning ("comparison is always %d", wanted_code == NE_EXPR);
3787
3788 return convert (truth_type,
3789 wanted_code == NE_EXPR
3790 ? integer_one_node : integer_zero_node);
3791 }
3792 }
3793 if (r_const)
3794 {
3795 r_const = convert (lntype, r_const);
3796 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
3797 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
3798 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
3799 fold (build1 (BIT_NOT_EXPR,
3800 lntype, rl_mask)),
3801 0)))
3802 {
3803 warning ("comparison is always %d", wanted_code == NE_EXPR);
3804
3805 return convert (truth_type,
3806 wanted_code == NE_EXPR
3807 ? integer_one_node : integer_zero_node);
3808 }
3809 }
3810
3811 /* If the right sides are not constant, do the same for them. Also,
3812 disallow this optimization if a size or signedness mismatch occurs
3813 between the left and right sides. */
3814 if (l_const == 0)
3815 {
3816 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
3817 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
3818 /* Make sure the two fields on the right
3819 correspond to the left without being swapped. */
3820 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
3821 return 0;
3822
3823 first_bit = MIN (lr_bitpos, rr_bitpos);
3824 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
3825 rnmode = get_best_mode (end_bit - first_bit, first_bit,
3826 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
3827 volatilep);
3828 if (rnmode == VOIDmode)
3829 return 0;
3830
3831 rnbitsize = GET_MODE_BITSIZE (rnmode);
3832 rnbitpos = first_bit & ~ (rnbitsize - 1);
3833 rntype = (*lang_hooks.types.type_for_size) (rnbitsize, 1);
3834 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
3835
3836 if (BYTES_BIG_ENDIAN)
3837 {
3838 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
3839 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
3840 }
3841
3842 lr_mask = const_binop (LSHIFT_EXPR, convert (rntype, lr_mask),
3843 size_int (xlr_bitpos), 0);
3844 rr_mask = const_binop (LSHIFT_EXPR, convert (rntype, rr_mask),
3845 size_int (xrr_bitpos), 0);
3846
3847 /* Make a mask that corresponds to both fields being compared.
3848 Do this for both items being compared. If the operands are the
3849 same size and the bits being compared are in the same position
3850 then we can do this by masking both and comparing the masked
3851 results. */
3852 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3853 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
3854 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
3855 {
3856 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
3857 ll_unsignedp || rl_unsignedp);
3858 if (! all_ones_mask_p (ll_mask, lnbitsize))
3859 lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
3860
3861 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
3862 lr_unsignedp || rr_unsignedp);
3863 if (! all_ones_mask_p (lr_mask, rnbitsize))
3864 rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
3865
3866 return build (wanted_code, truth_type, lhs, rhs);
3867 }
3868
3869 /* There is still another way we can do something: If both pairs of
3870 fields being compared are adjacent, we may be able to make a wider
3871 field containing them both.
3872
3873 Note that we still must mask the lhs/rhs expressions. Furthermore,
3874 the mask must be shifted to account for the shift done by
3875 make_bit_field_ref. */
3876 if ((ll_bitsize + ll_bitpos == rl_bitpos
3877 && lr_bitsize + lr_bitpos == rr_bitpos)
3878 || (ll_bitpos == rl_bitpos + rl_bitsize
3879 && lr_bitpos == rr_bitpos + rr_bitsize))
3880 {
3881 tree type;
3882
3883 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
3884 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
3885 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
3886 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
3887
3888 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
3889 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
3890 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
3891 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
3892
3893 /* Convert to the smaller type before masking out unwanted bits. */
3894 type = lntype;
3895 if (lntype != rntype)
3896 {
3897 if (lnbitsize > rnbitsize)
3898 {
3899 lhs = convert (rntype, lhs);
3900 ll_mask = convert (rntype, ll_mask);
3901 type = rntype;
3902 }
3903 else if (lnbitsize < rnbitsize)
3904 {
3905 rhs = convert (lntype, rhs);
3906 lr_mask = convert (lntype, lr_mask);
3907 type = lntype;
3908 }
3909 }
3910
3911 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
3912 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
3913
3914 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
3915 rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
3916
3917 return build (wanted_code, truth_type, lhs, rhs);
3918 }
3919
3920 return 0;
3921 }
3922
3923 /* Handle the case of comparisons with constants. If there is something in
3924 common between the masks, those bits of the constants must be the same.
3925 If not, the condition is always false. Test for this to avoid generating
3926 incorrect code below. */
3927 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
3928 if (! integer_zerop (result)
3929 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
3930 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
3931 {
3932 if (wanted_code == NE_EXPR)
3933 {
3934 warning ("`or' of unmatched not-equal tests is always 1");
3935 return convert (truth_type, integer_one_node);
3936 }
3937 else
3938 {
3939 warning ("`and' of mutually exclusive equal-tests is always 0");
3940 return convert (truth_type, integer_zero_node);
3941 }
3942 }
3943
3944 /* Construct the expression we will return. First get the component
3945 reference we will make. Unless the mask is all ones the width of
3946 that field, perform the mask operation. Then compare with the
3947 merged constant. */
3948 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
3949 ll_unsignedp || rl_unsignedp);
3950
3951 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3952 if (! all_ones_mask_p (ll_mask, lnbitsize))
3953 result = build (BIT_AND_EXPR, lntype, result, ll_mask);
3954
3955 return build (wanted_code, truth_type, result,
3956 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
3957 }
3958 \f
3959 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
3960 constant. */
3961
3962 static tree
3963 optimize_minmax_comparison (tree t)
3964 {
3965 tree type = TREE_TYPE (t);
3966 tree arg0 = TREE_OPERAND (t, 0);
3967 enum tree_code op_code;
3968 tree comp_const = TREE_OPERAND (t, 1);
3969 tree minmax_const;
3970 int consts_equal, consts_lt;
3971 tree inner;
3972
3973 STRIP_SIGN_NOPS (arg0);
3974
3975 op_code = TREE_CODE (arg0);
3976 minmax_const = TREE_OPERAND (arg0, 1);
3977 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
3978 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
3979 inner = TREE_OPERAND (arg0, 0);
3980
3981 /* If something does not permit us to optimize, return the original tree. */
3982 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
3983 || TREE_CODE (comp_const) != INTEGER_CST
3984 || TREE_CONSTANT_OVERFLOW (comp_const)
3985 || TREE_CODE (minmax_const) != INTEGER_CST
3986 || TREE_CONSTANT_OVERFLOW (minmax_const))
3987 return t;
3988
3989 /* Now handle all the various comparison codes. We only handle EQ_EXPR
3990 and GT_EXPR, doing the rest with recursive calls using logical
3991 simplifications. */
3992 switch (TREE_CODE (t))
3993 {
3994 case NE_EXPR: case LT_EXPR: case LE_EXPR:
3995 return
3996 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
3997
3998 case GE_EXPR:
3999 return
4000 fold (build (TRUTH_ORIF_EXPR, type,
4001 optimize_minmax_comparison
4002 (build (EQ_EXPR, type, arg0, comp_const)),
4003 optimize_minmax_comparison
4004 (build (GT_EXPR, type, arg0, comp_const))));
4005
4006 case EQ_EXPR:
4007 if (op_code == MAX_EXPR && consts_equal)
4008 /* MAX (X, 0) == 0 -> X <= 0 */
4009 return fold (build (LE_EXPR, type, inner, comp_const));
4010
4011 else if (op_code == MAX_EXPR && consts_lt)
4012 /* MAX (X, 0) == 5 -> X == 5 */
4013 return fold (build (EQ_EXPR, type, inner, comp_const));
4014
4015 else if (op_code == MAX_EXPR)
4016 /* MAX (X, 0) == -1 -> false */
4017 return omit_one_operand (type, integer_zero_node, inner);
4018
4019 else if (consts_equal)
4020 /* MIN (X, 0) == 0 -> X >= 0 */
4021 return fold (build (GE_EXPR, type, inner, comp_const));
4022
4023 else if (consts_lt)
4024 /* MIN (X, 0) == 5 -> false */
4025 return omit_one_operand (type, integer_zero_node, inner);
4026
4027 else
4028 /* MIN (X, 0) == -1 -> X == -1 */
4029 return fold (build (EQ_EXPR, type, inner, comp_const));
4030
4031 case GT_EXPR:
4032 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4033 /* MAX (X, 0) > 0 -> X > 0
4034 MAX (X, 0) > 5 -> X > 5 */
4035 return fold (build (GT_EXPR, type, inner, comp_const));
4036
4037 else if (op_code == MAX_EXPR)
4038 /* MAX (X, 0) > -1 -> true */
4039 return omit_one_operand (type, integer_one_node, inner);
4040
4041 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4042 /* MIN (X, 0) > 0 -> false
4043 MIN (X, 0) > 5 -> false */
4044 return omit_one_operand (type, integer_zero_node, inner);
4045
4046 else
4047 /* MIN (X, 0) > -1 -> X > -1 */
4048 return fold (build (GT_EXPR, type, inner, comp_const));
4049
4050 default:
4051 return t;
4052 }
4053 }
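/* For example (purely illustrative), MIN (x, 0) < 5 goes through the
   NE/LT/LE case above: it is rewritten as the inversion of
   MIN (x, 0) >= 5, the GE_EXPR case splits that into
   (MIN (x, 0) == 5) || (MIN (x, 0) > 5), both halves fold to false
   since 0 < 5, and the inverted result is therefore true.  */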
4054 \f
4055 /* T is an integer expression that is being multiplied by, divided by, or
4056 taken modulo a constant C (CODE says which, and what kind of division
4057 or modulus). See if we can eliminate that operation by folding it with
4058 other operations already in T. WIDE_TYPE, if non-null, is a type that
4059 should be used for the computation if wider than our type.
4060
4061 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4062 (X * 2) + (Y * 4). We must, however, be assured that either the original
4063 expression would not overflow or that overflow is undefined for the type
4064 in the language in question.
4065
4066 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4067 the machine has a multiply-accumulate insn or that this is part of an
4068 addressing calculation.
4069
4070 If we return a non-null expression, it is an equivalent form of the
4071 original computation, but need not be in the original type. */
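/* Illustrative sketches of cases handled below:

     (x * 12) / 4            ==>  x * 3
     MIN (x * 4, y * 8) / 2  ==>  MIN (x * 2, y * 4)
     (x << 3) / 2            ==>  (x * 8) / 2  ==>  x * 4

   where the shift is first rewritten as a multiplication, as in the
   LSHIFT_EXPR case of extract_muldiv_1.  */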
4072
4073 static tree
4074 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
4075 {
4076 /* To avoid exponential search depth, refuse to allow recursion past
4077 three levels. Beyond that (1) it's highly unlikely that we'll find
4078 something interesting and (2) we've probably processed it before
4079 when we built the inner expression. */
4080
4081 static int depth;
4082 tree ret;
4083
4084 if (depth > 3)
4085 return NULL;
4086
4087 depth++;
4088 ret = extract_muldiv_1 (t, c, code, wide_type);
4089 depth--;
4090
4091 return ret;
4092 }
4093
4094 static tree
4095 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
4096 {
4097 tree type = TREE_TYPE (t);
4098 enum tree_code tcode = TREE_CODE (t);
4099 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
4100 > GET_MODE_SIZE (TYPE_MODE (type)))
4101 ? wide_type : type);
4102 tree t1, t2;
4103 int same_p = tcode == code;
4104 tree op0 = NULL_TREE, op1 = NULL_TREE;
4105
4106 /* Don't deal with constants of zero here; they confuse the code below. */
4107 if (integer_zerop (c))
4108 return NULL_TREE;
4109
4110 if (TREE_CODE_CLASS (tcode) == '1')
4111 op0 = TREE_OPERAND (t, 0);
4112
4113 if (TREE_CODE_CLASS (tcode) == '2')
4114 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
4115
4116 /* Note that we need not handle conditional operations here since fold
4117 already handles those cases. So just do arithmetic here. */
4118 switch (tcode)
4119 {
4120 case INTEGER_CST:
4121 /* For a constant, we can always simplify if we are a multiply
4122 or (for divide and modulus) if it is a multiple of our constant. */
4123 if (code == MULT_EXPR
4124 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
4125 return const_binop (code, convert (ctype, t), convert (ctype, c), 0);
4126 break;
4127
4128 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
4129 /* If op0 is an expression ... */
4130 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
4131 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
4132 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
4133 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
4134 /* ... and is unsigned, and its type is smaller than ctype,
4135 then we cannot pass through as widening. */
4136 && ((TREE_UNSIGNED (TREE_TYPE (op0))
4137 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4138 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
4139 && (GET_MODE_SIZE (TYPE_MODE (ctype))
4140 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
4141 /* ... or its type is larger than ctype,
4142 then we cannot pass through this truncation. */
4143 || (GET_MODE_SIZE (TYPE_MODE (ctype))
4144 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
4145 /* ... or signedness changes for division or modulus,
4146 then we cannot pass through this conversion. */
4147 || (code != MULT_EXPR
4148 && (TREE_UNSIGNED (ctype)
4149 != TREE_UNSIGNED (TREE_TYPE (op0))))))
4150 break;
4151
4152 /* Pass the constant down and see if we can make a simplification. If
4153 we can, replace this expression with the inner simplification for
4154 possible later conversion to our or some other type. */
4155 if ((t2 = convert (TREE_TYPE (op0), c)) != 0
4156 && TREE_CODE (t2) == INTEGER_CST
4157 && ! TREE_CONSTANT_OVERFLOW (t2)
4158 && (0 != (t1 = extract_muldiv (op0, t2, code,
4159 code == MULT_EXPR
4160 ? ctype : NULL_TREE))))
4161 return t1;
4162 break;
4163
4164 case NEGATE_EXPR: case ABS_EXPR:
4165 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4166 return fold (build1 (tcode, ctype, convert (ctype, t1)));
4167 break;
4168
4169 case MIN_EXPR: case MAX_EXPR:
4170 /* If widening the type changes the signedness, then we can't perform
4171 this optimization as that changes the result. */
4172 if (TREE_UNSIGNED (ctype) != TREE_UNSIGNED (type))
4173 break;
4174
4175 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
4176 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
4177 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
4178 {
4179 if (tree_int_cst_sgn (c) < 0)
4180 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
4181
4182 return fold (build (tcode, ctype, convert (ctype, t1),
4183 convert (ctype, t2)));
4184 }
4185 break;
4186
4187 case WITH_RECORD_EXPR:
4188 if ((t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code, wide_type)) != 0)
4189 return build (WITH_RECORD_EXPR, TREE_TYPE (t1), t1,
4190 TREE_OPERAND (t, 1));
4191 break;
4192
4193 case LSHIFT_EXPR: case RSHIFT_EXPR:
4194 /* If the second operand is constant, this is a multiplication
4195 or floor division by a power of two, so we can treat it that
4196 way unless the multiplier or divisor overflows. */
4197 if (TREE_CODE (op1) == INTEGER_CST
4198 /* const_binop may not detect overflow correctly,
4199 so check for it explicitly here. */
4200 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
4201 && TREE_INT_CST_HIGH (op1) == 0
4202 && 0 != (t1 = convert (ctype,
4203 const_binop (LSHIFT_EXPR, size_one_node,
4204 op1, 0)))
4205 && ! TREE_OVERFLOW (t1))
4206 return extract_muldiv (build (tcode == LSHIFT_EXPR
4207 ? MULT_EXPR : FLOOR_DIV_EXPR,
4208 ctype, convert (ctype, op0), t1),
4209 c, code, wide_type);
4210 break;
4211
4212 case PLUS_EXPR: case MINUS_EXPR:
4213 /* See if we can eliminate the operation on both sides. If we can, we
4214 can return a new PLUS or MINUS. If we can't, the only remaining
4215 cases where we can do anything are if the second operand is a
4216 constant. */
4217 t1 = extract_muldiv (op0, c, code, wide_type);
4218 t2 = extract_muldiv (op1, c, code, wide_type);
4219 if (t1 != 0 && t2 != 0
4220 && (code == MULT_EXPR
4221 /* If not multiplication, we can only do this if both operands
4222 are divisible by c. */
4223 || (multiple_of_p (ctype, op0, c)
4224 && multiple_of_p (ctype, op1, c))))
4225 return fold (build (tcode, ctype, convert (ctype, t1),
4226 convert (ctype, t2)));
4227
4228 /* If this was a subtraction, negate OP1 and set it to be an addition.
4229 This simplifies the logic below. */
4230 if (tcode == MINUS_EXPR)
4231 tcode = PLUS_EXPR, op1 = negate_expr (op1);
4232
4233 if (TREE_CODE (op1) != INTEGER_CST)
4234 break;
4235
4236 /* If either OP1 or C is negative, this optimization is not safe for
4237 some of the division and remainder types while for others we need
4238 to change the code. */
4239 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
4240 {
4241 if (code == CEIL_DIV_EXPR)
4242 code = FLOOR_DIV_EXPR;
4243 else if (code == FLOOR_DIV_EXPR)
4244 code = CEIL_DIV_EXPR;
4245 else if (code != MULT_EXPR
4246 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
4247 break;
4248 }
4249
4250 /* If it's a multiply or a division/modulus operation of a multiple
4251 of our constant, do the operation and verify it doesn't overflow. */
4252 if (code == MULT_EXPR
4253 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4254 {
4255 op1 = const_binop (code, convert (ctype, op1), convert (ctype, c), 0);
4256 if (op1 == 0 || TREE_OVERFLOW (op1))
4257 break;
4258 }
4259 else
4260 break;
4261
4262 /* If we have an unsigned type that is not a sizetype, we cannot widen
4263 the operation since it will change the result if the original
4264 computation overflowed. */
4265 if (TREE_UNSIGNED (ctype)
4266 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
4267 && ctype != type)
4268 break;
4269
4270 /* If we were able to eliminate our operation from the first side,
4271 apply our operation to the second side and reform the PLUS. */
4272 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4273 return fold (build (tcode, ctype, convert (ctype, t1), op1));
4274
4275 /* The last case is if we are a multiply. In that case, we can
4276 apply the distributive law to commute the multiply and addition
4277 if the multiplication of the constants doesn't overflow. */
4278 if (code == MULT_EXPR)
4279 return fold (build (tcode, ctype, fold (build (code, ctype,
4280 convert (ctype, op0),
4281 convert (ctype, c))),
4282 op1));
4283
4284 break;
4285
4286 case MULT_EXPR:
4287 /* We have a special case here if we are doing something like
4288 (C * 8) % 4 since we know that's zero. */
4289 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4290 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4291 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4292 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4293 return omit_one_operand (type, integer_zero_node, op0);
4294
4295 /* ... fall through ... */
4296
4297 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
4298 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
4299 /* If we can extract our operation from the LHS, do so and return a
4300 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
4301 do something only if the second operand is a constant. */
4302 if (same_p
4303 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4304 return fold (build (tcode, ctype, convert (ctype, t1),
4305 convert (ctype, op1)));
4306 else if (tcode == MULT_EXPR && code == MULT_EXPR
4307 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4308 return fold (build (tcode, ctype, convert (ctype, op0),
4309 convert (ctype, t1)));
4310 else if (TREE_CODE (op1) != INTEGER_CST)
4311 return 0;
4312
4313 /* If these are the same operation types, we can associate them
4314 assuming no overflow. */
4315 if (tcode == code
4316 && 0 != (t1 = const_binop (MULT_EXPR, convert (ctype, op1),
4317 convert (ctype, c), 0))
4318 && ! TREE_OVERFLOW (t1))
4319 return fold (build (tcode, ctype, convert (ctype, op0), t1));
4320
4321 /* If these operations "cancel" each other, we have the main
4322 optimizations of this pass, which occur when either constant is a
4323 multiple of the other, in which case we replace this with either an
4324 operation of CODE or TCODE.
4325
4326 If we have an unsigned type that is not a sizetype, we cannot do
4327 this since it will change the result if the original computation
4328 overflowed. */
4329 if ((! TREE_UNSIGNED (ctype)
4330 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
4331 && ! flag_wrapv
4332 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4333 || (tcode == MULT_EXPR
4334 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4335 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
4336 {
4337 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4338 return fold (build (tcode, ctype, convert (ctype, op0),
4339 convert (ctype,
4340 const_binop (TRUNC_DIV_EXPR,
4341 op1, c, 0))));
4342 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4343 return fold (build (code, ctype, convert (ctype, op0),
4344 convert (ctype,
4345 const_binop (TRUNC_DIV_EXPR,
4346 c, op1, 0))));
4347 }
4348 break;
4349
4350 default:
4351 break;
4352 }
4353
4354 return 0;
4355 }
4356 \f
4357 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4358 S, a SAVE_EXPR, return the expression actually being evaluated. Note
4359 that we may sometimes modify the tree. */
4360
4361 static tree
4362 strip_compound_expr (tree t, tree s)
4363 {
4364 enum tree_code code = TREE_CODE (t);
4365
4366 /* See if this is the COMPOUND_EXPR we want to eliminate. */
4367 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4368 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4369 return TREE_OPERAND (t, 1);
4370
4371 /* See if this is a COND_EXPR or a simple arithmetic operator. We
4372 don't bother handling any other types. */
4373 else if (code == COND_EXPR)
4374 {
4375 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4376 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4377 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
4378 }
4379 else if (TREE_CODE_CLASS (code) == '1')
4380 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4381 else if (TREE_CODE_CLASS (code) == '<'
4382 || TREE_CODE_CLASS (code) == '2')
4383 {
4384 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4385 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4386 }
4387
4388 return t;
4389 }
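/* For example (sketch): if S is SAVE_EXPR <A> and T is the
   COMPOUND_EXPR ((void) SAVE_EXPR <A>, B) that was built only to force
   A's evaluation, this returns plain B; for COND_EXPRs and simple
   unary and binary operators it recurses into the operands looking for
   that pattern.  */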
4390 \f
4391 /* Return a node which has the indicated constant VALUE (either 0 or
4392 1), and is of the indicated TYPE. */
4393
4394 static tree
4395 constant_boolean_node (int value, tree type)
4396 {
4397 if (type == integer_type_node)
4398 return value ? integer_one_node : integer_zero_node;
4399 else if (TREE_CODE (type) == BOOLEAN_TYPE)
4400 return (*lang_hooks.truthvalue_conversion) (value ? integer_one_node :
4401 integer_zero_node);
4402 else
4403 {
4404 tree t = build_int_2 (value, 0);
4405
4406 TREE_TYPE (t) = type;
4407 return t;
4408 }
4409 }
4410
4411 /* Utility function for the following routine, to see how complex a nesting of
4412 COND_EXPRs can be. EXPR is the expression and LIMIT is a count beyond which
4413 we don't care (to avoid spending too much time on complex expressions). */
4414
4415 static int
4416 count_cond (tree expr, int lim)
4417 {
4418 int ctrue, cfalse;
4419
4420 if (TREE_CODE (expr) != COND_EXPR)
4421 return 0;
4422 else if (lim <= 0)
4423 return 0;
4424
4425 ctrue = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4426 cfalse = count_cond (TREE_OPERAND (expr, 2), lim - 1 - ctrue);
4427 return MIN (lim, 1 + ctrue + cfalse);
4428 }
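/* Example: for EXPR == (a ? b : (c ? d : e)) and LIM == 25, the true
   arm contributes 0, the false arm is itself a COND_EXPR and
   contributes 1, so the result is MIN (25, 1 + 0 + 1) == 2.  */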
4429
4430 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
4431 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
4432 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
4433 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
4434 COND is the first argument to CODE; otherwise (as in the example
4435 given here), it is the second argument. TYPE is the type of the
4436 original expression. */
4437
4438 static tree
4439 fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
4440 tree cond, tree arg, int cond_first_p)
4441 {
4442 tree test, true_value, false_value;
4443 tree lhs = NULL_TREE;
4444 tree rhs = NULL_TREE;
4445 /* In the end, we'll produce a COND_EXPR. Both arms of the
4446 conditional expression will be binary operations. The left-hand
4447 side of the expression to be executed if the condition is true
4448 will be pointed to by TRUE_LHS. Similarly, the right-hand side
4449 of the expression to be executed if the condition is true will be
4450 pointed to by TRUE_RHS. FALSE_LHS and FALSE_RHS are analogous --
4451 but apply to the expression to be executed if the conditional is
4452 false. */
4453 tree *true_lhs;
4454 tree *true_rhs;
4455 tree *false_lhs;
4456 tree *false_rhs;
4457 /* These are the codes to use for the left-hand side and right-hand
4458 side of the COND_EXPR. Normally, they are the same as CODE. */
4459 enum tree_code lhs_code = code;
4460 enum tree_code rhs_code = code;
4461 /* And these are the types of the expressions. */
4462 tree lhs_type = type;
4463 tree rhs_type = type;
4464 int save = 0;
4465
4466 if (cond_first_p)
4467 {
4468 true_rhs = false_rhs = &arg;
4469 true_lhs = &true_value;
4470 false_lhs = &false_value;
4471 }
4472 else
4473 {
4474 true_lhs = false_lhs = &arg;
4475 true_rhs = &true_value;
4476 false_rhs = &false_value;
4477 }
4478
4479 if (TREE_CODE (cond) == COND_EXPR)
4480 {
4481 test = TREE_OPERAND (cond, 0);
4482 true_value = TREE_OPERAND (cond, 1);
4483 false_value = TREE_OPERAND (cond, 2);
4484 /* If this operand is a throw-expression (so has void type), it does not make
4485 sense to try to perform a logical or arithmetic operation
4486 involving it. Instead of building `a + throw 3' for example,
4487 we simply build `a, throw 3'. */
4488 if (VOID_TYPE_P (TREE_TYPE (true_value)))
4489 {
4490 if (! cond_first_p)
4491 {
4492 lhs_code = COMPOUND_EXPR;
4493 lhs_type = void_type_node;
4494 }
4495 else
4496 lhs = true_value;
4497 }
4498 if (VOID_TYPE_P (TREE_TYPE (false_value)))
4499 {
4500 if (! cond_first_p)
4501 {
4502 rhs_code = COMPOUND_EXPR;
4503 rhs_type = void_type_node;
4504 }
4505 else
4506 rhs = false_value;
4507 }
4508 }
4509 else
4510 {
4511 tree testtype = TREE_TYPE (cond);
4512 test = cond;
4513 true_value = convert (testtype, integer_one_node);
4514 false_value = convert (testtype, integer_zero_node);
4515 }
4516
4517 /* If ARG is complex we want to make sure we only evaluate it once. Though
4518 this is only required if it is volatile, it might be more efficient even
4519 if it is not. However, if we succeed in folding one part to a constant,
4520 we do not need to make this SAVE_EXPR. Since we do this optimization
4521 primarily to see if we do end up with a constant and this SAVE_EXPR
4522 interferes with later optimizations, suppressing it when we can is
4523 important.
4524
4525 If we are not in a function, we can't make a SAVE_EXPR, so don't try to
4526 do so. Don't try to see if the result is a constant if an arm is a
4527 COND_EXPR since we get exponential behavior in that case. */
4528
4529 if (saved_expr_p (arg))
4530 save = 1;
4531 else if (lhs == 0 && rhs == 0
4532 && !TREE_CONSTANT (arg)
4533 && (*lang_hooks.decls.global_bindings_p) () == 0
4534 && ((TREE_CODE (arg) != VAR_DECL && TREE_CODE (arg) != PARM_DECL)
4535 || TREE_SIDE_EFFECTS (arg)))
4536 {
4537 if (TREE_CODE (true_value) != COND_EXPR)
4538 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4539
4540 if (TREE_CODE (false_value) != COND_EXPR)
4541 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4542
4543 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4544 && (rhs == 0 || !TREE_CONSTANT (rhs)))
4545 {
4546 arg = save_expr (arg);
4547 lhs = rhs = 0;
4548 save = 1;
4549 }
4550 }
4551
4552 if (lhs == 0)
4553 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4554 if (rhs == 0)
4555 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4556
4557 test = fold (build (COND_EXPR, type, test, lhs, rhs));
4558
4559 if (save)
4560 return build (COMPOUND_EXPR, type,
4561 convert (void_type_node, arg),
4562 strip_compound_expr (test, arg));
4563 else
4564 return convert (type, test);
4565 }
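/* A sketch of the SAVE_EXPR case above: for f () + (b ? x : y), where
   f () is neither simple nor constant-foldable, ARG becomes
   SAVE_EXPR <f ()> and the result is conceptually

     (void) SAVE_EXPR <f ()>, (b ? SAVE_EXPR <f ()> + x
                                 : SAVE_EXPR <f ()> + y)

   so that f () is evaluated exactly once.  */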
4566
4567 \f
4568 /* Subroutine of fold() that checks for the addition of +/- 0.0.
4569
4570 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
4571 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
4572 ADDEND is the same as X.
4573
4574 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
4575 and finite. The problematic cases are when X is zero, and its mode
4576 has signed zeros. In the case of rounding towards -infinity,
4577 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
4578 modes, X + 0 is not the same as X because -0 + 0 is 0. */
4579
4580 static bool
4581 fold_real_zero_addition_p (tree type, tree addend, int negate)
4582 {
4583 if (!real_zerop (addend))
4584 return false;
4585
4586 /* Don't allow the fold with -fsignaling-nans. */
4587 if (HONOR_SNANS (TYPE_MODE (type)))
4588 return false;
4589
4590 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
4591 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
4592 return true;
4593
4594 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
4595 if (TREE_CODE (addend) == REAL_CST
4596 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
4597 negate = !negate;
4598
4599 /* The mode has signed zeros, and we have to honor their sign.
4600 In this situation, there is only one case we can return true for.
4601 X - 0 is the same as X unless rounding towards -infinity must be
4602 honored. */
4603 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
4604 }
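/* Concretely: with signed zeros honored and round-to-nearest rounding,
   (-0.0) + 0.0 yields +0.0, so X + 0.0 cannot be folded to X; but
   X - 0.0 gives X even for X == -0.0, so that fold is allowed unless
   sign-dependent rounding matters (under rounding towards -infinity,
   0.0 - 0.0 is -0.0).  */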
4605
4606 /* Subroutine of fold() that checks comparisons of built-in math
4607 functions against real constants.
4608
4609 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
4610 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
4611 is the type of the result and ARG0 and ARG1 are the operands of the
4612 comparison. ARG1 must be a TREE_REAL_CST.
4613
4614 The function returns the constant folded tree if a simplification
4615 can be made, and NULL_TREE otherwise. */
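/* Illustrative sketches of the sqrt cases handled below:

     sqrt (x) > 2.0   ==>  x > 4.0
     sqrt (x) < -1.0  ==>  false
     sqrt (x) > -1.0  ==>  x >= 0.0    (when NaNs are honored)

   and, when NaNs are honored, sqrt (x) < 2.0 becomes
   x >= 0.0 && x < 4.0.  */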
4616
4617 static tree
4618 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
4619 tree type, tree arg0, tree arg1)
4620 {
4621 REAL_VALUE_TYPE c;
4622
4623 if (fcode == BUILT_IN_SQRT
4624 || fcode == BUILT_IN_SQRTF
4625 || fcode == BUILT_IN_SQRTL)
4626 {
4627 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
4628 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
4629
4630 c = TREE_REAL_CST (arg1);
4631 if (REAL_VALUE_NEGATIVE (c))
4632 {
4633 /* sqrt(x) < y is always false, if y is negative. */
4634 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
4635 return omit_one_operand (type,
4636 convert (type, integer_zero_node),
4637 arg);
4638
4639 /* sqrt(x) > y is always true, if y is negative and we
4640 don't care about NaNs, i.e. negative values of x. */
4641 if (code == NE_EXPR || !HONOR_NANS (mode))
4642 return omit_one_operand (type,
4643 convert (type, integer_one_node),
4644 arg);
4645
4646 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
4647 return fold (build (GE_EXPR, type, arg,
4648 build_real (TREE_TYPE (arg), dconst0)));
4649 }
4650 else if (code == GT_EXPR || code == GE_EXPR)
4651 {
4652 REAL_VALUE_TYPE c2;
4653
4654 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
4655 real_convert (&c2, mode, &c2);
4656
4657 if (REAL_VALUE_ISINF (c2))
4658 {
4659 /* sqrt(x) > y is x == +Inf, when y is very large. */
4660 if (HONOR_INFINITIES (mode))
4661 return fold (build (EQ_EXPR, type, arg,
4662 build_real (TREE_TYPE (arg), c2)));
4663
4664 /* sqrt(x) > y is always false, when y is very large
4665 and we don't care about infinities. */
4666 return omit_one_operand (type,
4667 convert (type, integer_zero_node),
4668 arg);
4669 }
4670
4671 /* sqrt(x) > c is the same as x > c*c. */
4672 return fold (build (code, type, arg,
4673 build_real (TREE_TYPE (arg), c2)));
4674 }
4675 else if (code == LT_EXPR || code == LE_EXPR)
4676 {
4677 REAL_VALUE_TYPE c2;
4678
4679 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
4680 real_convert (&c2, mode, &c2);
4681
4682 if (REAL_VALUE_ISINF (c2))
4683 {
4684 /* sqrt(x) < y is always true, when y is a very large
4685 value and we don't care about NaNs or Infinities. */
4686 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
4687 return omit_one_operand (type,
4688 convert (type, integer_one_node),
4689 arg);
4690
4691 /* sqrt(x) < y is x != +Inf when y is very large and we
4692 don't care about NaNs. */
4693 if (! HONOR_NANS (mode))
4694 return fold (build (NE_EXPR, type, arg,
4695 build_real (TREE_TYPE (arg), c2)));
4696
4697 /* sqrt(x) < y is x >= 0 when y is very large and we
4698 don't care about Infinities. */
4699 if (! HONOR_INFINITIES (mode))
4700 return fold (build (GE_EXPR, type, arg,
4701 build_real (TREE_TYPE (arg), dconst0)));
4702
4703 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
4704 if ((*lang_hooks.decls.global_bindings_p) () != 0
4705 || CONTAINS_PLACEHOLDER_P (arg))
4706 return NULL_TREE;
4707
4708 arg = save_expr (arg);
4709 return fold (build (TRUTH_ANDIF_EXPR, type,
4710 fold (build (GE_EXPR, type, arg,
4711 build_real (TREE_TYPE (arg),
4712 dconst0))),
4713 fold (build (NE_EXPR, type, arg,
4714 build_real (TREE_TYPE (arg),
4715 c2)))));
4716 }
4717
4718 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
4719 if (! HONOR_NANS (mode))
4720 return fold (build (code, type, arg,
4721 build_real (TREE_TYPE (arg), c2)));
4722
4723 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
4724 if ((*lang_hooks.decls.global_bindings_p) () == 0
4725 && ! CONTAINS_PLACEHOLDER_P (arg))
4726 {
4727 arg = save_expr (arg);
4728 return fold (build (TRUTH_ANDIF_EXPR, type,
4729 fold (build (GE_EXPR, type, arg,
4730 build_real (TREE_TYPE (arg),
4731 dconst0))),
4732 fold (build (code, type, arg,
4733 build_real (TREE_TYPE (arg),
4734 c2)))));
4735 }
4736 }
4737 }
4738
4739 return NULL_TREE;
4740 }
4741
4742 /* Subroutine of fold() that optimizes comparisons against Infinities,
4743 either +Inf or -Inf.
4744
4745 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
4746 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
4747 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
4748
4749 The function returns the constant folded tree if a simplification
4750 can be made, and NULL_TREE otherwise. */
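/* For example, with DBL_MAX standing for the mode's largest finite
   value:

     x >  +Inf  ==>  false           (unless sNaNs must be honored)
     x <= +Inf  ==>  x == x          (i.e. !isnan (x))
     x <  +Inf  ==>  x <= DBL_MAX
     x != +Inf  ==>  !(x > DBL_MAX)  (or x <= DBL_MAX, ignoring NaNs)

   Comparisons against -Inf are handled by first swapping the sense of
   the comparison.  */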
4751
4752 static tree
4753 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
4754 {
4755 enum machine_mode mode;
4756 REAL_VALUE_TYPE max;
4757 tree temp;
4758 bool neg;
4759
4760 mode = TYPE_MODE (TREE_TYPE (arg0));
4761
4762 /* For negative infinity swap the sense of the comparison. */
4763 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
4764 if (neg)
4765 code = swap_tree_comparison (code);
4766
4767 switch (code)
4768 {
4769 case GT_EXPR:
4770 /* x > +Inf is always false, if we ignore sNaNs. */
4771 if (HONOR_SNANS (mode))
4772 return NULL_TREE;
4773 return omit_one_operand (type,
4774 convert (type, integer_zero_node),
4775 arg0);
4776
4777 case LE_EXPR:
4778 /* x <= +Inf is always true, if we don't care about NaNs. */
4779 if (! HONOR_NANS (mode))
4780 return omit_one_operand (type,
4781 convert (type, integer_one_node),
4782 arg0);
4783
4784 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
4785 if ((*lang_hooks.decls.global_bindings_p) () == 0
4786 && ! CONTAINS_PLACEHOLDER_P (arg0))
4787 {
4788 arg0 = save_expr (arg0);
4789 return fold (build (EQ_EXPR, type, arg0, arg0));
4790 }
4791 break;
4792
4793 case EQ_EXPR:
4794 case GE_EXPR:
4795 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
4796 real_maxval (&max, neg, mode);
4797 return fold (build (neg ? LT_EXPR : GT_EXPR, type,
4798 arg0, build_real (TREE_TYPE (arg0), max)));
4799
4800 case LT_EXPR:
4801 /* x < +Inf is always equal to x <= DBL_MAX. */
4802 real_maxval (&max, neg, mode);
4803 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
4804 arg0, build_real (TREE_TYPE (arg0), max)));
4805
4806 case NE_EXPR:
4807 /* x != +Inf is always equal to !(x > DBL_MAX). */
4808 real_maxval (&max, neg, mode);
4809 if (! HONOR_NANS (mode))
4810 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
4811 arg0, build_real (TREE_TYPE (arg0), max)));
4812 temp = fold (build (neg ? LT_EXPR : GT_EXPR, type,
4813 arg0, build_real (TREE_TYPE (arg0), max)));
4814 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
4815
4816 default:
4817 break;
4818 }
4819
4820 return NULL_TREE;
4821 }
4822
4823 /* If CODE with arguments ARG0 and ARG1 represents a single bit
4824 equality/inequality test, then return a simplified form of
4825 the test using shifts and logical operations. Otherwise return
4826 NULL. TYPE is the desired result type. */
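/* For example (sketch): with an 8-bit unsigned operand,

     (x & 0x08) != 0  ==>  (x >> 3) & 1
     (x & 0x08) == 0  ==>  ((x >> 3) ^ 1) & 1

   and when the tested bit is the sign bit of a signed operand,
   (x & 0x80) != 0 becomes simply x < 0.  */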
4827
4828 tree
4829 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
4830 tree result_type)
4831 {
4832 /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
4833 operand 0. */
4834 if (code == TRUTH_NOT_EXPR)
4835 {
4836 code = TREE_CODE (arg0);
4837 if (code != NE_EXPR && code != EQ_EXPR)
4838 return NULL_TREE;
4839
4840 /* Extract the arguments of the EQ/NE. */
4841 arg1 = TREE_OPERAND (arg0, 1);
4842 arg0 = TREE_OPERAND (arg0, 0);
4843
4844 /* This requires us to invert the code. */
4845 code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
4846 }
4847
4848 /* If this is testing a single bit, we can optimize the test. */
4849 if ((code == NE_EXPR || code == EQ_EXPR)
4850 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
4851 && integer_pow2p (TREE_OPERAND (arg0, 1)))
4852 {
4853 tree inner = TREE_OPERAND (arg0, 0);
4854 tree type = TREE_TYPE (arg0);
4855 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
4856 enum machine_mode operand_mode = TYPE_MODE (type);
4857 int ops_unsigned;
4858 tree signed_type, unsigned_type;
4859 tree arg00;
4860
4861 /* If we have (A & C) != 0 where C is the sign bit of A, convert
4862 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
4863 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
4864 if (arg00 != NULL_TREE)
4865 {
4866 tree stype = (*lang_hooks.types.signed_type) (TREE_TYPE (arg00));
4867 return fold (build (code == EQ_EXPR ? GE_EXPR : LT_EXPR, result_type,
4868 convert (stype, arg00),
4869 convert (stype, integer_zero_node)));
4870 }
4871
4872 /* At this point, we know that arg0 is not testing the sign bit. */
4873 if (TYPE_PRECISION (type) - 1 == bitnum)
4874 abort ();
4875
4876 /* Otherwise we have (A & C) != 0 where C is a single bit,
4877 convert that into ((A >> C2) & 1), where C2 = log2(C).
4878 Similarly for (A & C) == 0. */
4879
4880 /* If INNER is a right shift by a constant and it plus BITNUM does
4881 not overflow, adjust BITNUM and INNER. */
4882 if (TREE_CODE (inner) == RSHIFT_EXPR
4883 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
4884 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
4885 && bitnum < TYPE_PRECISION (type)
4886 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
4887 bitnum - TYPE_PRECISION (type)))
4888 {
4889 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
4890 inner = TREE_OPERAND (inner, 0);
4891 }
4892
4893 /* If we are going to be able to omit the AND below, we must do our
4894 operations as unsigned. If we must use the AND, we have a choice.
4895 Normally unsigned is faster, but for some machines signed is. */
4896 #ifdef LOAD_EXTEND_OP
4897 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
4898 #else
4899 ops_unsigned = 1;
4900 #endif
4901
4902 signed_type = (*lang_hooks.types.type_for_mode) (operand_mode, 0);
4903 unsigned_type = (*lang_hooks.types.type_for_mode) (operand_mode, 1);
4904
4905 if (bitnum != 0)
4906 inner = build (RSHIFT_EXPR, ops_unsigned ? unsigned_type : signed_type,
4907 inner, size_int (bitnum));
4908
4909 if (code == EQ_EXPR)
4910 inner = build (BIT_XOR_EXPR, ops_unsigned ? unsigned_type : signed_type,
4911 inner, integer_one_node);
4912
4913 /* Put the AND last so it can combine with more things. */
4914 inner = build (BIT_AND_EXPR, ops_unsigned ? unsigned_type : signed_type,
4915 inner, integer_one_node);
4916
4917 /* Make sure to return the proper type. */
4918 if (TREE_TYPE (inner) != result_type)
4919 inner = convert (result_type, inner);
4920
4921 return inner;
4922 }
4923 return NULL_TREE;
4924 }
4925
4926 /* Perform constant folding and related simplification of EXPR.
4927 The related simplifications include x*1 => x, x*0 => 0, etc.,
4928 and application of the associative law.
4929 NOP_EXPR conversions may be removed freely (as long as we
4930 are careful not to change the C type of the overall expression).
4931 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
4932 but we can constant-fold them if they have constant operands. */
4933
4934 #ifdef ENABLE_FOLD_CHECKING
4935 # define fold(x) fold_1 (x)
4936 static tree fold_1 (tree);
4937 static
4938 #endif
4939 tree
4940 fold (tree expr)
4941 {
4942 tree t = expr, orig_t;
4943 tree t1 = NULL_TREE;
4944 tree tem;
4945 tree type = TREE_TYPE (expr);
4946 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4947 enum tree_code code = TREE_CODE (t);
4948 int kind = TREE_CODE_CLASS (code);
4949 int invert;
4950 /* WINS will be nonzero when the switch is done
4951 if all operands are constant. */
4952 int wins = 1;
4953
4954 /* Don't try to process an RTL_EXPR since its operands aren't trees.
4955 Likewise for a SAVE_EXPR that's already been evaluated. */
4956 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
4957 return t;
4958
4959 /* Return right away if a constant. */
4960 if (kind == 'c')
4961 return t;
4962
4963 #ifdef MAX_INTEGER_COMPUTATION_MODE
4964 check_max_integer_computation_mode (expr);
4965 #endif
4966 orig_t = t;
4967
4968 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
4969 {
4970 tree subop;
4971
4972 /* Special case for conversion ops that can have fixed point args. */
4973 arg0 = TREE_OPERAND (t, 0);
4974
4975 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
4976 if (arg0 != 0)
4977 STRIP_SIGN_NOPS (arg0);
4978
4979 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
4980 subop = TREE_REALPART (arg0);
4981 else
4982 subop = arg0;
4983
4984 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
4985 && TREE_CODE (subop) != REAL_CST)
4987 /* Note that TREE_CONSTANT isn't enough:
4988 static var addresses are constant but we can't
4989 do arithmetic on them. */
4990 wins = 0;
4991 }
4992 else if (IS_EXPR_CODE_CLASS (kind) || kind == 'r')
4993 {
4994 int len = first_rtl_op (code);
4995 int i;
4996 for (i = 0; i < len; i++)
4997 {
4998 tree op = TREE_OPERAND (t, i);
4999 tree subop;
5000
5001 if (op == 0)
5002 continue; /* Valid for CALL_EXPR, at least. */
5003
5004 if (kind == '<' || code == RSHIFT_EXPR)
5005 {
5006 /* Signedness matters here. Perhaps we can refine this
5007 later. */
5008 STRIP_SIGN_NOPS (op);
5009 }
5010 else
5011 /* Strip any conversions that don't change the mode. */
5012 STRIP_NOPS (op);
5013
5014 if (TREE_CODE (op) == COMPLEX_CST)
5015 subop = TREE_REALPART (op);
5016 else
5017 subop = op;
5018
5019 if (TREE_CODE (subop) != INTEGER_CST
5020 && TREE_CODE (subop) != REAL_CST)
5021 /* Note that TREE_CONSTANT isn't enough:
5022 static var addresses are constant but we can't
5023 do arithmetic on them. */
5024 wins = 0;
5025
5026 if (i == 0)
5027 arg0 = op;
5028 else if (i == 1)
5029 arg1 = op;
5030 }
5031 }
5032
5033 /* If this is a commutative operation, and ARG0 is a constant, move it
5034 to ARG1 to reduce the number of tests below. */
5035 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
5036 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
5037 || code == BIT_AND_EXPR)
5038 && ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) != INTEGER_CST)
5039 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) != REAL_CST)))
5040 {
5041 tem = arg0; arg0 = arg1; arg1 = tem;
5042
5043 if (t == orig_t)
5044 t = copy_node (t);
5045 TREE_OPERAND (t, 0) = arg0;
5046 TREE_OPERAND (t, 1) = arg1;
5047 }
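  /* For example, 3 + x reaches this point as x + 3, so the cases below
     only need to look for a constant in ARG1.  */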
5048
5049 /* Now WINS is set as described above,
5050 ARG0 is the first operand of EXPR,
5051 and ARG1 is the second operand (if it has more than one operand).
5052
5053 First check for cases where an arithmetic operation is applied to a
5054 compound, conditional, or comparison operation. Push the arithmetic
5055 operation inside the compound or conditional to see if any folding
5056 can then be done. Convert comparison to conditional for this purpose.
5057 This also optimizes non-constant cases that used to be done in
5058 expand_expr.
5059
5060 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
5061 one of the operands is a comparison and the other is a comparison, a
5062 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
5063 code below would make the expression more complex. Change it to a
5064 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
5065 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
5066
5067 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
5068 || code == EQ_EXPR || code == NE_EXPR)
5069 && ((truth_value_p (TREE_CODE (arg0))
5070 && (truth_value_p (TREE_CODE (arg1))
5071 || (TREE_CODE (arg1) == BIT_AND_EXPR
5072 && integer_onep (TREE_OPERAND (arg1, 1)))))
5073 || (truth_value_p (TREE_CODE (arg1))
5074 && (truth_value_p (TREE_CODE (arg0))
5075 || (TREE_CODE (arg0) == BIT_AND_EXPR
5076 && integer_onep (TREE_OPERAND (arg0, 1)))))))
5077 {
5078 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
5079 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
5080 : TRUTH_XOR_EXPR,
5081 type, arg0, arg1));
5082
5083 if (code == EQ_EXPR)
5084 t = invert_truthvalue (t);
5085
5086 return t;
5087 }
5088
5089 if (TREE_CODE_CLASS (code) == '1')
5090 {
5091 if (TREE_CODE (arg0) == COMPOUND_EXPR)
5092 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5093 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
5094 else if (TREE_CODE (arg0) == COND_EXPR)
5095 {
5096 tree arg01 = TREE_OPERAND (arg0, 1);
5097 tree arg02 = TREE_OPERAND (arg0, 2);
5098 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
5099 arg01 = fold (build1 (code, type, arg01));
5100 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
5101 arg02 = fold (build1 (code, type, arg02));
5102 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
5103 arg01, arg02));
5104
5105 /* If this was a conversion, and all we did was to move it
5106 inside the COND_EXPR, bring it back out. But leave it if
5107 it is a conversion from integer to integer and the
5108 result precision is no wider than a word since such a
5109 conversion is cheap and may be optimized away by combine,
5110 while it couldn't if it were outside the COND_EXPR. Then return
5111 so we don't get into an infinite recursion loop taking the
5112 conversion out and then back in. */
5113
5114 if ((code == NOP_EXPR || code == CONVERT_EXPR
5115 || code == NON_LVALUE_EXPR)
5116 && TREE_CODE (t) == COND_EXPR
5117 && TREE_CODE (TREE_OPERAND (t, 1)) == code
5118 && TREE_CODE (TREE_OPERAND (t, 2)) == code
5119 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1)))
5120 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 2)))
5121 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
5122 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
5123 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
5124 && (INTEGRAL_TYPE_P
5125 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))))
5126 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
5127 t = build1 (code, type,
5128 build (COND_EXPR,
5129 TREE_TYPE (TREE_OPERAND
5130 (TREE_OPERAND (t, 1), 0)),
5131 TREE_OPERAND (t, 0),
5132 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
5133 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
5134 return t;
5135 }
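	      /* For example, (long) (p ? i : j) is first rewritten as
		 p ? (long) i : (long) j so each arm can fold; the test
		 above then decides whether to hoist the conversion back
		 out of the COND_EXPR.  */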
5136 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
5137 return fold (build (COND_EXPR, type, arg0,
5138 fold (build1 (code, type, integer_one_node)),
5139 fold (build1 (code, type, integer_zero_node))));
5140 }
5141 else if (TREE_CODE_CLASS (code) == '<'
5142 && TREE_CODE (arg0) == COMPOUND_EXPR)
5143 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5144 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5145 else if (TREE_CODE_CLASS (code) == '<'
5146 && TREE_CODE (arg1) == COMPOUND_EXPR)
5147 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5148 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
5149 else if (TREE_CODE_CLASS (code) == '2'
5150 || TREE_CODE_CLASS (code) == '<')
5151 {
5152 if (TREE_CODE (arg1) == COMPOUND_EXPR
5153 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg1, 0))
5154 && ! TREE_SIDE_EFFECTS (arg0))
5155 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5156 fold (build (code, type,
5157 arg0, TREE_OPERAND (arg1, 1))));
5158 else if ((TREE_CODE (arg1) == COND_EXPR
5159 || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
5160 && TREE_CODE_CLASS (code) != '<'))
5161 && (TREE_CODE (arg0) != COND_EXPR
5162 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5163 && (! TREE_SIDE_EFFECTS (arg0)
5164 || ((*lang_hooks.decls.global_bindings_p) () == 0
5165 && ! CONTAINS_PLACEHOLDER_P (arg0))))
5166 return
5167 fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
5168 /*cond_first_p=*/0);
5169 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
5170 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5171 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5172 else if ((TREE_CODE (arg0) == COND_EXPR
5173 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
5174 && TREE_CODE_CLASS (code) != '<'))
5175 && (TREE_CODE (arg1) != COND_EXPR
5176 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5177 && (! TREE_SIDE_EFFECTS (arg1)
5178 || ((*lang_hooks.decls.global_bindings_p) () == 0
5179 && ! CONTAINS_PLACEHOLDER_P (arg1))))
5180 return
5181 fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
5182 /*cond_first_p=*/1);
5183 }
5184
5185 switch (code)
5186 {
5187 case INTEGER_CST:
5188 case REAL_CST:
5189 case VECTOR_CST:
5190 case STRING_CST:
5191 case COMPLEX_CST:
5192 case CONSTRUCTOR:
5193 return t;
5194
5195 case CONST_DECL:
5196 return fold (DECL_INITIAL (t));
5197
5198 case NOP_EXPR:
5199 case FLOAT_EXPR:
5200 case CONVERT_EXPR:
5201 case FIX_TRUNC_EXPR:
5202 /* Other kinds of FIX are not handled properly by fold_convert. */
5203
5204 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
5205 return TREE_OPERAND (t, 0);
5206
5207 /* Handle cases of two conversions in a row. */
5208 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
5209 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
5210 {
5211 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5212 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
5213 tree final_type = TREE_TYPE (t);
5214 int inside_int = INTEGRAL_TYPE_P (inside_type);
5215 int inside_ptr = POINTER_TYPE_P (inside_type);
5216 int inside_float = FLOAT_TYPE_P (inside_type);
5217 unsigned int inside_prec = TYPE_PRECISION (inside_type);
5218 int inside_unsignedp = TREE_UNSIGNED (inside_type);
5219 int inter_int = INTEGRAL_TYPE_P (inter_type);
5220 int inter_ptr = POINTER_TYPE_P (inter_type);
5221 int inter_float = FLOAT_TYPE_P (inter_type);
5222 unsigned int inter_prec = TYPE_PRECISION (inter_type);
5223 int inter_unsignedp = TREE_UNSIGNED (inter_type);
5224 int final_int = INTEGRAL_TYPE_P (final_type);
5225 int final_ptr = POINTER_TYPE_P (final_type);
5226 int final_float = FLOAT_TYPE_P (final_type);
5227 unsigned int final_prec = TYPE_PRECISION (final_type);
5228 int final_unsignedp = TREE_UNSIGNED (final_type);
5229
5230 /* In addition to the cases of two conversions in a row
5231 handled below, if we are converting something to its own
5232 type via an object of identical or wider precision, neither
5233 conversion is needed. */
5234 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (final_type)
5235 && ((inter_int && final_int) || (inter_float && final_float))
5236 && inter_prec >= final_prec)
5237 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5238
5239 /* Likewise, if the intermediate and final types are either both
5240 float or both integer, we don't need the middle conversion if
5241 it is wider than the final type and doesn't change the signedness
5242 (for integers). Avoid this if the final type is a pointer
5243 since then we sometimes need the inner conversion. Likewise if
5244 the outer has a precision not equal to the size of its mode. */
5245 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
5246 || (inter_float && inside_float))
5247 && inter_prec >= inside_prec
5248 && (inter_float || inter_unsignedp == inside_unsignedp)
5249 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5250 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5251 && ! final_ptr)
5252 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5253
5254 /* If we have a sign-extension of a zero-extended value, we can
5255 replace that by a single zero-extension. */
5256 if (inside_int && inter_int && final_int
5257 && inside_prec < inter_prec && inter_prec < final_prec
5258 && inside_unsignedp && !inter_unsignedp)
5259 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5260
5261 /* Two conversions in a row are not needed unless:
5262 - some conversion is floating-point (overstrict for now), or
5263 - the intermediate type is narrower than both initial and
5264 final, or
5265 - the intermediate type and innermost type differ in signedness,
5266 and the outermost type is wider than the intermediate, or
5267 - the initial type is a pointer type and the precisions of the
5268 intermediate and final types differ, or
5269 - the final type is a pointer type and the precisions of the
5270 initial and intermediate types differ. */
5271 if (! inside_float && ! inter_float && ! final_float
5272 && (inter_prec > inside_prec || inter_prec > final_prec)
5273 && ! (inside_int && inter_int
5274 && inter_unsignedp != inside_unsignedp
5275 && inter_prec < final_prec)
5276 && ((inter_unsignedp && inter_prec > inside_prec)
5277 == (final_unsignedp && final_prec > inter_prec))
5278 && ! (inside_ptr && inter_prec != final_prec)
5279 && ! (final_ptr && inside_prec != inter_prec)
5280 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5281 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5282 && ! final_ptr)
5283 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5284 }
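	  /* A worked example of the rules above (assuming 8-bit char,
	     32-bit int and 64-bit long): for "char c;" the expression
	     (long) (int) c drops the intermediate widening and becomes
	     (long) c, and for "short s;" the expression (short) (int) s
	     folds all the way back to s.  */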
5285
5286 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
5287 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
5288 /* Detect assigning a bitfield. */
5289 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
5290 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
5291 {
5292 /* Don't leave an assignment inside a conversion
5293 unless assigning a bitfield. */
5294 tree prev = TREE_OPERAND (t, 0);
5295 if (t == orig_t)
5296 t = copy_node (t);
5297 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
5298 /* First do the assignment, then return converted constant. */
5299 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
5300 TREE_USED (t) = 1;
5301 return t;
5302 }
5303
5304 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
5305 constant (if x has signed type, the sign bit cannot be set
5306 in c). This folds extension into the BIT_AND_EXPR. */
5307 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
5308 && TREE_CODE (TREE_TYPE (t)) != BOOLEAN_TYPE
5309 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
5310 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
5311 {
5312 tree and = TREE_OPERAND (t, 0);
5313 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
5314 int change = 0;
5315
5316 if (TREE_UNSIGNED (TREE_TYPE (and))
5317 || (TYPE_PRECISION (TREE_TYPE (t))
5318 <= TYPE_PRECISION (TREE_TYPE (and))))
5319 change = 1;
5320 else if (TYPE_PRECISION (TREE_TYPE (and1))
5321 <= HOST_BITS_PER_WIDE_INT
5322 && host_integerp (and1, 1))
5323 {
5324 unsigned HOST_WIDE_INT cst;
5325
5326 cst = tree_low_cst (and1, 1);
5327 cst &= (HOST_WIDE_INT) -1
5328 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
5329 change = (cst == 0);
5330 #ifdef LOAD_EXTEND_OP
5331 if (change
5332 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
5333 == ZERO_EXTEND))
5334 {
5335 tree uns = (*lang_hooks.types.unsigned_type) (TREE_TYPE (and0));
5336 and0 = convert (uns, and0);
5337 and1 = convert (uns, and1);
5338 }
5339 #endif
5340 }
5341 if (change)
5342 return fold (build (BIT_AND_EXPR, TREE_TYPE (t),
5343 convert (TREE_TYPE (t), and0),
5344 convert (TREE_TYPE (t), and1)));
5345 }
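	  /* For example, with "int x;", (long long) (x & 0xff) becomes
	     (long long) x & 0xff: the mask has no sign bit set, so the
	     widening can safely be folded into the BIT_AND_EXPR.  */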
5346
5347 if (!wins)
5348 {
5349 if (TREE_CONSTANT (t) != TREE_CONSTANT (arg0))
5350 {
5351 if (t == orig_t)
5352 t = copy_node (t);
5353 TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
5354 }
5355 return t;
5356 }
5357 return fold_convert (t, arg0);
5358
5359 case VIEW_CONVERT_EXPR:
5360 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
5361 return build1 (VIEW_CONVERT_EXPR, type,
5362 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5363 return t;
5364
5365 case COMPONENT_REF:
5366 if (TREE_CODE (arg0) == CONSTRUCTOR
5367 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
5368 {
5369 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
5370 if (m)
5371 t = TREE_VALUE (m);
5372 }
5373 return t;
5374
5375 case RANGE_EXPR:
5376 if (TREE_CONSTANT (t) != wins)
5377 {
5378 if (t == orig_t)
5379 t = copy_node (t);
5380 TREE_CONSTANT (t) = wins;
5381 }
5382 return t;
5383
5384 case NEGATE_EXPR:
5385 if (wins)
5386 {
5387 if (TREE_CODE (arg0) == INTEGER_CST)
5388 {
5389 unsigned HOST_WIDE_INT low;
5390 HOST_WIDE_INT high;
5391 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5392 TREE_INT_CST_HIGH (arg0),
5393 &low, &high);
5394 t = build_int_2 (low, high);
5395 TREE_TYPE (t) = type;
5396 TREE_OVERFLOW (t)
5397 = (TREE_OVERFLOW (arg0)
5398 | force_fit_type (t, overflow && !TREE_UNSIGNED (type)));
5399 TREE_CONSTANT_OVERFLOW (t)
5400 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5401 }
5402 else if (TREE_CODE (arg0) == REAL_CST)
5403 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5404 }
5405 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5406 return TREE_OPERAND (arg0, 0);
5407 /* Convert -((double)float) into (double)(-float). */
5408 else if (TREE_CODE (arg0) == NOP_EXPR
5409 && TREE_CODE (type) == REAL_TYPE)
5410 {
5411 tree targ0 = strip_float_extensions (arg0);
5412 if (targ0 != arg0)
5413 return convert (type, build1 (NEGATE_EXPR, TREE_TYPE (targ0), targ0));
5414
5415 }
5416
5417 /* Convert - (a - b) to (b - a) for non-floating-point, or for
floating-point under -funsafe-math-optimizations.  */
5418 else if (TREE_CODE (arg0) == MINUS_EXPR
5419 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
5420 return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
5421 TREE_OPERAND (arg0, 0));
5422
5423 /* Convert -f(x) into f(-x) where f is sin, tan or atan. */
5424 switch (builtin_mathfn_code (arg0))
5425 {
5426 case BUILT_IN_SIN:
5427 case BUILT_IN_SINF:
5428 case BUILT_IN_SINL:
5429 case BUILT_IN_TAN:
5430 case BUILT_IN_TANF:
5431 case BUILT_IN_TANL:
5432 case BUILT_IN_ATAN:
5433 case BUILT_IN_ATANF:
5434 case BUILT_IN_ATANL:
5435 if (negate_expr_p (TREE_VALUE (TREE_OPERAND (arg0, 1))))
5436 {
5437 tree fndecl, arg, arglist;
5438
5439 fndecl = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
5440 arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5441 arg = fold (build1 (NEGATE_EXPR, type, arg));
5442 arglist = build_tree_list (NULL_TREE, arg);
5443 return build_function_call_expr (fndecl, arglist);
5444 }
5445 break;
5446
5447 default:
5448 break;
5449 }
5450 return t;
5451
5452 case ABS_EXPR:
5453 if (wins)
5454 {
5455 if (TREE_CODE (arg0) == INTEGER_CST)
5456 {
5457 /* If the value is unsigned, then the absolute value is
5458 the same as the ordinary value. */
5459 if (TREE_UNSIGNED (type))
5460 return arg0;
5461 /* Similarly, if the value is non-negative. */
5462 else if (INT_CST_LT (integer_minus_one_node, arg0))
5463 return arg0;
5464 /* If the value is negative, then the absolute value is
5465 its negation. */
5466 else
5467 {
5468 unsigned HOST_WIDE_INT low;
5469 HOST_WIDE_INT high;
5470 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5471 TREE_INT_CST_HIGH (arg0),
5472 &low, &high);
5473 t = build_int_2 (low, high);
5474 TREE_TYPE (t) = type;
5475 TREE_OVERFLOW (t)
5476 = (TREE_OVERFLOW (arg0)
5477 | force_fit_type (t, overflow));
5478 TREE_CONSTANT_OVERFLOW (t)
5479 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5480 }
5481 }
5482 else if (TREE_CODE (arg0) == REAL_CST)
5483 {
5484 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
5485 t = build_real (type,
5486 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5487 }
5488 }
5489 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5490 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
5491 /* Convert fabs((double)float) into (double)fabsf(float). */
5492 else if (TREE_CODE (arg0) == NOP_EXPR
5493 && TREE_CODE (type) == REAL_TYPE)
5494 {
5495 tree targ0 = strip_float_extensions (arg0);
5496 if (targ0 != arg0)
5497 return convert (type, fold (build1 (ABS_EXPR, TREE_TYPE (targ0),
5498 targ0)));
5499 }
5500 else if (tree_expr_nonnegative_p (arg0))
5501 return arg0;
5502 return t;
5503
5504 case CONJ_EXPR:
5505 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5506 return convert (type, arg0);
5507 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5508 return build (COMPLEX_EXPR, type,
5509 TREE_OPERAND (arg0, 0),
5510 negate_expr (TREE_OPERAND (arg0, 1)));
5511 else if (TREE_CODE (arg0) == COMPLEX_CST)
5512 return build_complex (type, TREE_REALPART (arg0),
5513 negate_expr (TREE_IMAGPART (arg0)));
5514 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5515 return fold (build (TREE_CODE (arg0), type,
5516 fold (build1 (CONJ_EXPR, type,
5517 TREE_OPERAND (arg0, 0))),
5518 fold (build1 (CONJ_EXPR,
5519 type, TREE_OPERAND (arg0, 1)))));
5520 else if (TREE_CODE (arg0) == CONJ_EXPR)
5521 return TREE_OPERAND (arg0, 0);
5522 return t;
5523
5524 case BIT_NOT_EXPR:
5525 if (wins)
5526 {
5527 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
5528 ~ TREE_INT_CST_HIGH (arg0));
5529 TREE_TYPE (t) = type;
5530 force_fit_type (t, 0);
5531 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
5532 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
5533 }
5534 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
5535 return TREE_OPERAND (arg0, 0);
5536 return t;
5537
5538 case PLUS_EXPR:
5539 /* A + (-B) -> A - B */
5540 if (TREE_CODE (arg1) == NEGATE_EXPR)
5541 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5542 /* (-A) + B -> B - A */
5543 if (TREE_CODE (arg0) == NEGATE_EXPR)
5544 return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
5545 else if (! FLOAT_TYPE_P (type))
5546 {
5547 if (integer_zerop (arg1))
5548 return non_lvalue (convert (type, arg0));
5549
5550 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
5551 with a constant, and the two constants have no bits in common,
5552 we should treat this as a BIT_IOR_EXPR since this may produce more
5553 simplifications. */
5554 if (TREE_CODE (arg0) == BIT_AND_EXPR
5555 && TREE_CODE (arg1) == BIT_AND_EXPR
5556 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5557 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5558 && integer_zerop (const_binop (BIT_AND_EXPR,
5559 TREE_OPERAND (arg0, 1),
5560 TREE_OPERAND (arg1, 1), 0)))
5561 {
5562 code = BIT_IOR_EXPR;
5563 goto bit_ior;
5564 }
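	      /* For example, (x & 0xf0) + (y & 0x0f) can never carry, so
		 it is rewritten as (x & 0xf0) | (y & 0x0f) and handed to
		 the BIT_IOR_EXPR code below.  */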
5565
5566 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
5567 (plus (plus (mult) (mult)) (foo)) so that we can
5568 take advantage of the factoring cases below. */
5569 if ((TREE_CODE (arg0) == PLUS_EXPR
5570 && TREE_CODE (arg1) == MULT_EXPR)
5571 || (TREE_CODE (arg1) == PLUS_EXPR
5572 && TREE_CODE (arg0) == MULT_EXPR))
5573 {
5574 tree parg0, parg1, parg, marg;
5575
5576 if (TREE_CODE (arg0) == PLUS_EXPR)
5577 parg = arg0, marg = arg1;
5578 else
5579 parg = arg1, marg = arg0;
5580 parg0 = TREE_OPERAND (parg, 0);
5581 parg1 = TREE_OPERAND (parg, 1);
5582 STRIP_NOPS (parg0);
5583 STRIP_NOPS (parg1);
5584
5585 if (TREE_CODE (parg0) == MULT_EXPR
5586 && TREE_CODE (parg1) != MULT_EXPR)
5587 return fold (build (PLUS_EXPR, type,
5588 fold (build (PLUS_EXPR, type,
5589 convert (type, parg0),
5590 convert (type, marg))),
5591 convert (type, parg1)));
5592 if (TREE_CODE (parg0) != MULT_EXPR
5593 && TREE_CODE (parg1) == MULT_EXPR)
5594 return fold (build (PLUS_EXPR, type,
5595 fold (build (PLUS_EXPR, type,
5596 convert (type, parg1),
5597 convert (type, marg))),
5598 convert (type, parg0)));
5599 }
5600
5601 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
5602 {
5603 tree arg00, arg01, arg10, arg11;
5604 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
5605
5606 /* (A * C) + (B * C) -> (A+B) * C.
5607 We are most concerned about the case where C is a constant,
5608 but other combinations show up during loop reduction. Since
5609 it is not difficult, try all four possibilities. */
5610
5611 arg00 = TREE_OPERAND (arg0, 0);
5612 arg01 = TREE_OPERAND (arg0, 1);
5613 arg10 = TREE_OPERAND (arg1, 0);
5614 arg11 = TREE_OPERAND (arg1, 1);
5615 same = NULL_TREE;
5616
5617 if (operand_equal_p (arg01, arg11, 0))
5618 same = arg01, alt0 = arg00, alt1 = arg10;
5619 else if (operand_equal_p (arg00, arg10, 0))
5620 same = arg00, alt0 = arg01, alt1 = arg11;
5621 else if (operand_equal_p (arg00, arg11, 0))
5622 same = arg00, alt0 = arg01, alt1 = arg10;
5623 else if (operand_equal_p (arg01, arg10, 0))
5624 same = arg01, alt0 = arg00, alt1 = arg11;
5625
5626 /* No identical multiplicands; see if we can find a common
5627 power-of-two factor in non-power-of-two multiplies. This
5628 can help in multi-dimensional array access. */
5629 else if (TREE_CODE (arg01) == INTEGER_CST
5630 && TREE_CODE (arg11) == INTEGER_CST
5631 && TREE_INT_CST_HIGH (arg01) == 0
5632 && TREE_INT_CST_HIGH (arg11) == 0)
5633 {
5634 HOST_WIDE_INT int01, int11, tmp;
5635 int01 = TREE_INT_CST_LOW (arg01);
5636 int11 = TREE_INT_CST_LOW (arg11);
5637
5638 /* Move min of absolute values to int11. */
5639 if ((int01 >= 0 ? int01 : -int01)
5640 < (int11 >= 0 ? int11 : -int11))
5641 {
5642 tmp = int01, int01 = int11, int11 = tmp;
5643 alt0 = arg00, arg00 = arg10, arg10 = alt0;
5644 alt0 = arg01, arg01 = arg11, arg11 = alt0;
5645 }
5646
5647 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
5648 {
5649 alt0 = fold (build (MULT_EXPR, type, arg00,
5650 build_int_2 (int01 / int11, 0)));
5651 alt1 = arg10;
5652 same = arg11;
5653 }
5654 }
5655
5656 if (same)
5657 return fold (build (MULT_EXPR, type,
5658 fold (build (PLUS_EXPR, type, alt0, alt1)),
5659 same));
5660 }
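	      /* For example, address arithmetic such as i*12 + j*4 has
		 the common power-of-two factor 4 and becomes
		 (i*3 + j)*4, while a*c + b*c factors directly into
		 (a + b)*c.  */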
5661 }
5662
5663 /* See if ARG1 is zero and X + ARG1 reduces to X. */
5664 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
5665 return non_lvalue (convert (type, arg0));
5666
5667 /* Likewise if the operands are reversed. */
5668 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
5669 return non_lvalue (convert (type, arg1));
5670
5671 bit_rotate:
5672 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
5673 is a rotate of A by C1 bits. */
5674 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
5675 is a rotate of A by B bits. */
5676 {
5677 enum tree_code code0, code1;
5678 code0 = TREE_CODE (arg0);
5679 code1 = TREE_CODE (arg1);
5680 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
5681 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
5682 && operand_equal_p (TREE_OPERAND (arg0, 0),
5683 TREE_OPERAND (arg1, 0), 0)
5684 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5685 {
5686 tree tree01, tree11;
5687 enum tree_code code01, code11;
5688
5689 tree01 = TREE_OPERAND (arg0, 1);
5690 tree11 = TREE_OPERAND (arg1, 1);
5691 STRIP_NOPS (tree01);
5692 STRIP_NOPS (tree11);
5693 code01 = TREE_CODE (tree01);
5694 code11 = TREE_CODE (tree11);
5695 if (code01 == INTEGER_CST
5696 && code11 == INTEGER_CST
5697 && TREE_INT_CST_HIGH (tree01) == 0
5698 && TREE_INT_CST_HIGH (tree11) == 0
5699 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
5700 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
5701 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
5702 code0 == LSHIFT_EXPR ? tree01 : tree11);
5703 else if (code11 == MINUS_EXPR)
5704 {
5705 tree tree110, tree111;
5706 tree110 = TREE_OPERAND (tree11, 0);
5707 tree111 = TREE_OPERAND (tree11, 1);
5708 STRIP_NOPS (tree110);
5709 STRIP_NOPS (tree111);
5710 if (TREE_CODE (tree110) == INTEGER_CST
5711 && 0 == compare_tree_int (tree110,
5712 TYPE_PRECISION
5713 (TREE_TYPE (TREE_OPERAND
5714 (arg0, 0))))
5715 && operand_equal_p (tree01, tree111, 0))
5716 return build ((code0 == LSHIFT_EXPR
5717 ? LROTATE_EXPR
5718 : RROTATE_EXPR),
5719 type, TREE_OPERAND (arg0, 0), tree01);
5720 }
5721 else if (code01 == MINUS_EXPR)
5722 {
5723 tree tree010, tree011;
5724 tree010 = TREE_OPERAND (tree01, 0);
5725 tree011 = TREE_OPERAND (tree01, 1);
5726 STRIP_NOPS (tree010);
5727 STRIP_NOPS (tree011);
5728 if (TREE_CODE (tree010) == INTEGER_CST
5729 && 0 == compare_tree_int (tree010,
5730 TYPE_PRECISION
5731 (TREE_TYPE (TREE_OPERAND
5732 (arg0, 0))))
5733 && operand_equal_p (tree11, tree011, 0))
5734 return build ((code0 != LSHIFT_EXPR
5735 ? LROTATE_EXPR
5736 : RROTATE_EXPR),
5737 type, TREE_OPERAND (arg0, 0), tree11);
5738 }
5739 }
5740 }
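	/* For example, with a 32-bit unsigned x, both
	   (x << 3) + (x >> 29) and (x << n) + (x >> (32 - n)) are
	   recognized here as left rotates (by 3 and by n
	   respectively).  */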
5741
5742 associate:
5743 /* In most languages, we can't associate operations on floats through
5744 parentheses. Rather than remember where the parentheses were, we
5745 don't associate floats at all. It shouldn't matter much. However,
5746 associating multiplications is only very slightly inaccurate, so do
5747 that if -funsafe-math-optimizations is specified. */
5748
5749 if (! wins
5750 && (! FLOAT_TYPE_P (type)
5751 || (flag_unsafe_math_optimizations && code == MULT_EXPR)))
5752 {
5753 tree var0, con0, lit0, minus_lit0;
5754 tree var1, con1, lit1, minus_lit1;
5755
5756 /* Split both trees into variables, constants, and literals. Then
5757 associate each group together, the constants with literals,
5758 then the result with variables. This increases the chances of
5759 literals being recombined later and of generating relocatable
5760 expressions for the sum of a constant and literal. */
5761 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
5762 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
5763 code == MINUS_EXPR);
5764
5765 /* Only do something if we found more than two objects. Otherwise,
5766 nothing has changed and we risk infinite recursion. */
5767 if (2 < ((var0 != 0) + (var1 != 0)
5768 + (con0 != 0) + (con1 != 0)
5769 + (lit0 != 0) + (lit1 != 0)
5770 + (minus_lit0 != 0) + (minus_lit1 != 0)))
5771 {
5772 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
5773 if (code == MINUS_EXPR)
5774 code = PLUS_EXPR;
5775
5776 var0 = associate_trees (var0, var1, code, type);
5777 con0 = associate_trees (con0, con1, code, type);
5778 lit0 = associate_trees (lit0, lit1, code, type);
5779 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
5780
5781 /* Preserve the MINUS_EXPR if the negative part of the literal is
5782 greater than the positive part. Otherwise, the multiplicative
5783 folding code (i.e. extract_muldiv) may be fooled when
5784 unsigned constants are subtracted, as in the following
5785 example: ((X*2 + 4) - 8U)/2. */
5786 if (minus_lit0 && lit0)
5787 {
5788 if (tree_int_cst_lt (lit0, minus_lit0))
5789 {
5790 minus_lit0 = associate_trees (minus_lit0, lit0,
5791 MINUS_EXPR, type);
5792 lit0 = 0;
5793 }
5794 else
5795 {
5796 lit0 = associate_trees (lit0, minus_lit0,
5797 MINUS_EXPR, type);
5798 minus_lit0 = 0;
5799 }
5800 }
5801 if (minus_lit0)
5802 {
5803 if (con0 == 0)
5804 return convert (type, associate_trees (var0, minus_lit0,
5805 MINUS_EXPR, type));
5806 else
5807 {
5808 con0 = associate_trees (con0, minus_lit0,
5809 MINUS_EXPR, type);
5810 return convert (type, associate_trees (var0, con0,
5811 PLUS_EXPR, type));
5812 }
5813 }
5814
5815 con0 = associate_trees (con0, lit0, code, type);
5816 return convert (type, associate_trees (var0, con0, code, type));
5817 }
5818 }
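	/* For example, reassociation turns (x + 1) + (y + 2) into
	   (x + y) + 3: the variable parts and the literal parts are
	   each combined, then the two partial sums are recombined.  */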
5819
5820 binary:
5821 if (wins)
5822 t1 = const_binop (code, arg0, arg1, 0);
5823 if (t1 != NULL_TREE)
5824 {
5825 /* The return value should always have
5826 the same type as the original expression. */
5827 if (TREE_TYPE (t1) != TREE_TYPE (t))
5828 t1 = convert (TREE_TYPE (t), t1);
5829
5830 return t1;
5831 }
5832 return t;
5833
5834 case MINUS_EXPR:
5835 /* A - (-B) -> A + B */
5836 if (TREE_CODE (arg1) == NEGATE_EXPR)
5837 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5838 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
5839 if (TREE_CODE (arg0) == NEGATE_EXPR
5840 && (FLOAT_TYPE_P (type)
5841 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
5842 && negate_expr_p (arg1)
5843 && (! TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
5844 && (! TREE_SIDE_EFFECTS (arg1) || TREE_CONSTANT (arg0)))
5845 return fold (build (MINUS_EXPR, type, negate_expr (arg1),
5846 TREE_OPERAND (arg0, 0)));
5847
5848 if (! FLOAT_TYPE_P (type))
5849 {
5850 if (! wins && integer_zerop (arg0))
5851 return negate_expr (convert (type, arg1));
5852 if (integer_zerop (arg1))
5853 return non_lvalue (convert (type, arg0));
5854
5855 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
5856 about the case where C is a constant, just try one of the
5857 four possibilities. */
5858
5859 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
5860 && operand_equal_p (TREE_OPERAND (arg0, 1),
5861 TREE_OPERAND (arg1, 1), 0))
5862 return fold (build (MULT_EXPR, type,
5863 fold (build (MINUS_EXPR, type,
5864 TREE_OPERAND (arg0, 0),
5865 TREE_OPERAND (arg1, 0))),
5866 TREE_OPERAND (arg0, 1)));
5867
5868 /* Fold A - (A & B) into ~B & A. */
5869 if (!TREE_SIDE_EFFECTS (arg0)
5870 && TREE_CODE (arg1) == BIT_AND_EXPR)
5871 {
5872 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
5873 return fold (build (BIT_AND_EXPR, type,
5874 fold (build1 (BIT_NOT_EXPR, type,
5875 TREE_OPERAND (arg1, 0))),
5876 arg0));
5877 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
5878 return fold (build (BIT_AND_EXPR, type,
5879 fold (build1 (BIT_NOT_EXPR, type,
5880 TREE_OPERAND (arg1, 1))),
5881 arg0));
5882 }
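	      /* For example, both x - (x & y) and x - (y & x) become
		 ~y & x here.  */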
5883 }
5884
5885 /* See if ARG1 is zero and X - ARG1 reduces to X. */
5886 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
5887 return non_lvalue (convert (type, arg0));
5888
5889 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
5890 ARG0 is zero and X + ARG0 reduces to X, since that would mean
5891 (-ARG1 + ARG0) reduces to -ARG1. */
5892 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
5893 return negate_expr (convert (type, arg1));
5894
5895 /* Fold &x - &x. This can happen from &x.foo - &x.
5896 This is unsafe for certain floats even in non-IEEE formats.
5897 In IEEE, it is unsafe because it gives the wrong result for NaNs.
5898 Also note that operand_equal_p is always false if an operand
5899 is volatile. */
5900
5901 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
5902 && operand_equal_p (arg0, arg1, 0))
5903 return convert (type, integer_zero_node);
5904
5905 goto associate;
5906
5907 case MULT_EXPR:
5908 /* (-A) * (-B) -> A * B */
5909 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
5910 return fold (build (MULT_EXPR, type, TREE_OPERAND (arg0, 0),
5911 TREE_OPERAND (arg1, 0)));
5912
5913 if (! FLOAT_TYPE_P (type))
5914 {
5915 if (integer_zerop (arg1))
5916 return omit_one_operand (type, arg1, arg0);
5917 if (integer_onep (arg1))
5918 return non_lvalue (convert (type, arg0));
5919
5920 /* (a * (1 << b)) is (a << b) */
5921 if (TREE_CODE (arg1) == LSHIFT_EXPR
5922 && integer_onep (TREE_OPERAND (arg1, 0)))
5923 return fold (build (LSHIFT_EXPR, type, arg0,
5924 TREE_OPERAND (arg1, 1)));
5925 if (TREE_CODE (arg0) == LSHIFT_EXPR
5926 && integer_onep (TREE_OPERAND (arg0, 0)))
5927 return fold (build (LSHIFT_EXPR, type, arg1,
5928 TREE_OPERAND (arg0, 1)));
5929
5930 if (TREE_CODE (arg1) == INTEGER_CST
5931 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
5932 convert (type, arg1),
5933 code, NULL_TREE)))
5934 return convert (type, tem);
5935
5936 }
5937 else
5938 {
5939 /* Maybe fold x * 0 to 0. The expressions aren't the same
5940 when x is NaN, since x * 0 is also NaN. Nor are they the
5941 same in modes with signed zeros, since multiplying a
5942 negative value by 0 gives -0, not +0. */
5943 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
5944 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
5945 && real_zerop (arg1))
5946 return omit_one_operand (type, arg1, arg0);
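	      /* Concretely, (-1.0) * 0.0 is -0.0 and NaN * 0.0 is NaN,
		 which is why x*0.0 is folded to 0.0 only when signed
		 zeros and NaNs are not honored.  */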
5947 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
5948 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
5949 && real_onep (arg1))
5950 return non_lvalue (convert (type, arg0));
5951
5952 /* Transform x * -1.0 into -x. */
5953 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
5954 && real_minus_onep (arg1))
5955 return fold (build1 (NEGATE_EXPR, type, arg0));
5956
5957 /* x*2 is x+x */
5958 if (! wins && real_twop (arg1)
5959 && (*lang_hooks.decls.global_bindings_p) () == 0
5960 && ! CONTAINS_PLACEHOLDER_P (arg0))
5961 {
5962 tree arg = save_expr (arg0);
5963 return fold (build (PLUS_EXPR, type, arg, arg));
5964 }
5965
5966 if (flag_unsafe_math_optimizations)
5967 {
5968 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
5969 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
5970
5971 /* Optimizations of sqrt(...)*sqrt(...). */
5972 if ((fcode0 == BUILT_IN_SQRT && fcode1 == BUILT_IN_SQRT)
5973 || (fcode0 == BUILT_IN_SQRTF && fcode1 == BUILT_IN_SQRTF)
5974 || (fcode0 == BUILT_IN_SQRTL && fcode1 == BUILT_IN_SQRTL))
5975 {
5976 tree sqrtfn, arg, arglist;
5977 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
5978 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
5979
5980 /* Optimize sqrt(x)*sqrt(x) as x. */
5981 if (operand_equal_p (arg00, arg10, 0)
5982 && ! HONOR_SNANS (TYPE_MODE (type)))
5983 return arg00;
5984
5985 /* Optimize sqrt(x)*sqrt(y) as sqrt(x*y). */
5986 sqrtfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
5987 arg = fold (build (MULT_EXPR, type, arg00, arg10));
5988 arglist = build_tree_list (NULL_TREE, arg);
5989 return build_function_call_expr (sqrtfn, arglist);
5990 }
5991
5992 /* Optimize exp(x)*exp(y) as exp(x+y). */
5993 if ((fcode0 == BUILT_IN_EXP && fcode1 == BUILT_IN_EXP)
5994 || (fcode0 == BUILT_IN_EXPF && fcode1 == BUILT_IN_EXPF)
5995 || (fcode0 == BUILT_IN_EXPL && fcode1 == BUILT_IN_EXPL))
5996 {
5997 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
5998 tree arg = build (PLUS_EXPR, type,
5999 TREE_VALUE (TREE_OPERAND (arg0, 1)),
6000 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6001 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6002 return build_function_call_expr (expfn, arglist);
6003 }
6004
6005 /* Optimizations of pow(...)*pow(...). */
6006 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
6007 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
6008 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
6009 {
6010 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6011 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6012 1)));
6013 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6014 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6015 1)));
6016
6017 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
6018 if (operand_equal_p (arg01, arg11, 0))
6019 {
6020 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6021 tree arg = build (MULT_EXPR, type, arg00, arg10);
6022 tree arglist = tree_cons (NULL_TREE, fold (arg),
6023 build_tree_list (NULL_TREE,
6024 arg01));
6025 return build_function_call_expr (powfn, arglist);
6026 }
6027
6028 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
6029 if (operand_equal_p (arg00, arg10, 0))
6030 {
6031 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6032 tree arg = fold (build (PLUS_EXPR, type, arg01, arg11));
6033 tree arglist = tree_cons (NULL_TREE, arg00,
6034 build_tree_list (NULL_TREE,
6035 arg));
6036 return build_function_call_expr (powfn, arglist);
6037 }
6038 }
6039
6040 /* Optimize tan(x)*cos(x) as sin(x). */
6041 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
6042 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
6043 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
6044 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
6045 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
6046 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
6047 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6048 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6049 {
6050 tree sinfn;
6051
6052 switch (fcode0)
6053 {
6054 case BUILT_IN_TAN:
6055 case BUILT_IN_COS:
6056 sinfn = implicit_built_in_decls[BUILT_IN_SIN];
6057 break;
6058 case BUILT_IN_TANF:
6059 case BUILT_IN_COSF:
6060 sinfn = implicit_built_in_decls[BUILT_IN_SINF];
6061 break;
6062 case BUILT_IN_TANL:
6063 case BUILT_IN_COSL:
6064 sinfn = implicit_built_in_decls[BUILT_IN_SINL];
6065 break;
6066 default:
6067 sinfn = NULL_TREE;
6068 }
6069
6070 if (sinfn != NULL_TREE)
6071 return build_function_call_expr (sinfn,
6072 TREE_OPERAND (arg0, 1));
6073 }
6074
6075 /* Optimize x*pow(x,c) as pow(x,c+1). */
6076 if (fcode1 == BUILT_IN_POW
6077 || fcode1 == BUILT_IN_POWF
6078 || fcode1 == BUILT_IN_POWL)
6079 {
6080 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6081 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6082 1)));
6083 if (TREE_CODE (arg11) == REAL_CST
6084 && ! TREE_CONSTANT_OVERFLOW (arg11)
6085 && operand_equal_p (arg0, arg10, 0))
6086 {
6087 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6088 REAL_VALUE_TYPE c;
6089 tree arg, arglist;
6090
6091 c = TREE_REAL_CST (arg11);
6092 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6093 arg = build_real (type, c);
6094 arglist = build_tree_list (NULL_TREE, arg);
6095 arglist = tree_cons (NULL_TREE, arg0, arglist);
6096 return build_function_call_expr (powfn, arglist);
6097 }
6098 }
6099
6100 /* Optimize pow(x,c)*x as pow(x,c+1). */
6101 if (fcode0 == BUILT_IN_POW
6102 || fcode0 == BUILT_IN_POWF
6103 || fcode0 == BUILT_IN_POWL)
6104 {
6105 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6106 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6107 1)));
6108 if (TREE_CODE (arg01) == REAL_CST
6109 && ! TREE_CONSTANT_OVERFLOW (arg01)
6110 && operand_equal_p (arg1, arg00, 0))
6111 {
6112 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6113 REAL_VALUE_TYPE c;
6114 tree arg, arglist;
6115
6116 c = TREE_REAL_CST (arg01);
6117 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6118 arg = build_real (type, c);
6119 arglist = build_tree_list (NULL_TREE, arg);
6120 arglist = tree_cons (NULL_TREE, arg1, arglist);
6121 return build_function_call_expr (powfn, arglist);
6122 }
6123 }
6124
6125 /* Canonicalize x*x as pow(x,2.0); expansion turns it back into x*x. */
6126 if (! optimize_size
6127 && operand_equal_p (arg0, arg1, 0))
6128 {
6129 tree powfn;
6130
6131 if (type == double_type_node)
6132 powfn = implicit_built_in_decls[BUILT_IN_POW];
6133 else if (type == float_type_node)
6134 powfn = implicit_built_in_decls[BUILT_IN_POWF];
6135 else if (type == long_double_type_node)
6136 powfn = implicit_built_in_decls[BUILT_IN_POWL];
6137 else
6138 powfn = NULL_TREE;
6139
6140 if (powfn)
6141 {
6142 tree arg = build_real (type, dconst2);
6143 tree arglist = build_tree_list (NULL_TREE, arg);
6144 arglist = tree_cons (NULL_TREE, arg0, arglist);
6145 return build_function_call_expr (powfn, arglist);
6146 }
6147 }
6148 }
6149 }
6150 goto associate;
6151
6152 case BIT_IOR_EXPR:
6153 bit_ior:
6154 if (integer_all_onesp (arg1))
6155 return omit_one_operand (type, arg1, arg0);
6156 if (integer_zerop (arg1))
6157 return non_lvalue (convert (type, arg0));
6158 t1 = distribute_bit_expr (code, type, arg0, arg1);
6159 if (t1 != NULL_TREE)
6160 return t1;
6161
6162 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
6163
6164 This results in more efficient code for machines without a NAND
6165 instruction. Combine will canonicalize to the first form
6166 which will allow use of NAND instructions provided by the
6167 backend if they exist. */
6168 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6169 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6170 {
6171 return fold (build1 (BIT_NOT_EXPR, type,
6172 build (BIT_AND_EXPR, type,
6173 TREE_OPERAND (arg0, 0),
6174 TREE_OPERAND (arg1, 0))));
6175 }
6176
6177 /* See if this can be simplified into a rotate first.  If that
6178 is unsuccessful, continue in the association code.  */
6179 goto bit_rotate;
6180
6181 case BIT_XOR_EXPR:
6182 if (integer_zerop (arg1))
6183 return non_lvalue (convert (type, arg0));
6184 if (integer_all_onesp (arg1))
6185 return fold (build1 (BIT_NOT_EXPR, type, arg0));
6186
6187 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
6188 with a constant, and the two constants have no bits in common,
6189 we should treat this as a BIT_IOR_EXPR since this may produce more
6190 simplifications. */
6191 if (TREE_CODE (arg0) == BIT_AND_EXPR
6192 && TREE_CODE (arg1) == BIT_AND_EXPR
6193 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6194 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6195 && integer_zerop (const_binop (BIT_AND_EXPR,
6196 TREE_OPERAND (arg0, 1),
6197 TREE_OPERAND (arg1, 1), 0)))
6198 {
6199 code = BIT_IOR_EXPR;
6200 goto bit_ior;
6201 }
6202
6203 /* See if this can be simplified into a rotate first.  If that
6204 is unsuccessful, continue in the association code.  */
6205 goto bit_rotate;
6206
6207 case BIT_AND_EXPR:
6208 bit_and:
6209 if (integer_all_onesp (arg1))
6210 return non_lvalue (convert (type, arg0));
6211 if (integer_zerop (arg1))
6212 return omit_one_operand (type, arg1, arg0);
6213 t1 = distribute_bit_expr (code, type, arg0, arg1);
6214 if (t1 != NULL_TREE)
6215 return t1;
6216 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
6217 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
6218 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6219 {
6220 unsigned int prec
6221 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
6222
6223 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
6224 && (~TREE_INT_CST_LOW (arg1)
6225 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
6226 return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
6227 }
6228
6229 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
6230
6231 This results in more efficient code for machines without a NOR
6232 instruction. Combine will canonicalize to the first form
6233 which will allow use of NOR instructions provided by the
6234 backend if they exist. */
6235 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6236 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6237 {
6238 return fold (build1 (BIT_NOT_EXPR, type,
6239 build (BIT_IOR_EXPR, type,
6240 TREE_OPERAND (arg0, 0),
6241 TREE_OPERAND (arg1, 0))));
6242 }
6243
6244 goto associate;
6245
6246 case BIT_ANDTC_EXPR:
6247 if (integer_all_onesp (arg0))
6248 return non_lvalue (convert (type, arg1));
6249 if (integer_zerop (arg0))
6250 return omit_one_operand (type, arg0, arg1);
6251 if (TREE_CODE (arg1) == INTEGER_CST)
6252 {
6253 arg1 = fold (build1 (BIT_NOT_EXPR, type, arg1));
6254 code = BIT_AND_EXPR;
6255 goto bit_and;
6256 }
6257 goto binary;
6258
6259 case RDIV_EXPR:
6260 /* Don't touch a floating-point divide by zero unless the mode
6261 of the constant can represent infinity. */
6262 if (TREE_CODE (arg1) == REAL_CST
6263 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
6264 && real_zerop (arg1))
6265 return t;
6266
6267 /* (-A) / (-B) -> A / B */
6268 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
6269 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
6270 TREE_OPERAND (arg1, 0)));
6271
6272 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
6273 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6274 && real_onep (arg1))
6275 return non_lvalue (convert (type, arg0));
6276
6277 /* If ARG1 is a constant, we can convert this to a multiply by the
6278 reciprocal. This does not have the same rounding properties,
6279 so only do this if -funsafe-math-optimizations. We can actually
6280 always safely do it if ARG1 is a power of two, but it's hard to
6281 tell if it is or not in a portable manner. */
6282 if (TREE_CODE (arg1) == REAL_CST)
6283 {
6284 if (flag_unsafe_math_optimizations
6285 && 0 != (tem = const_binop (code, build_real (type, dconst1),
6286 arg1, 0)))
6287 return fold (build (MULT_EXPR, type, arg0, tem));
6288 /* Find the reciprocal if optimizing and the result is exact. */
6289 else if (optimize)
6290 {
6291 REAL_VALUE_TYPE r;
6292 r = TREE_REAL_CST (arg1);
6293 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
6294 {
6295 tem = build_real (type, r);
6296 return fold (build (MULT_EXPR, type, arg0, tem));
6297 }
6298 }
6299 }
6300 /* Convert A/B/C to A/(B*C). */
6301 if (flag_unsafe_math_optimizations
6302 && TREE_CODE (arg0) == RDIV_EXPR)
6303 {
6304 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
6305 build (MULT_EXPR, type, TREE_OPERAND (arg0, 1),
6306 arg1)));
6307 }
6308 /* Convert A/(B/C) to (A/B)*C. */
6309 if (flag_unsafe_math_optimizations
6310 && TREE_CODE (arg1) == RDIV_EXPR)
6311 {
6312 return fold (build (MULT_EXPR, type,
6313 build (RDIV_EXPR, type, arg0,
6314 TREE_OPERAND (arg1, 0)),
6315 TREE_OPERAND (arg1, 1)));
6316 }
6317
6318 if (flag_unsafe_math_optimizations)
6319 {
6320 enum built_in_function fcode = builtin_mathfn_code (arg1);
6321 /* Optimize x/exp(y) into x*exp(-y). */
6322 if (fcode == BUILT_IN_EXP
6323 || fcode == BUILT_IN_EXPF
6324 || fcode == BUILT_IN_EXPL)
6325 {
6326 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6327 tree arg = build1 (NEGATE_EXPR, type,
6328 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6329 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6330 arg1 = build_function_call_expr (expfn, arglist);
6331 return fold (build (MULT_EXPR, type, arg0, arg1));
6332 }
6333
6334 /* Optimize x/pow(y,z) into x*pow(y,-z). */
6335 if (fcode == BUILT_IN_POW
6336 || fcode == BUILT_IN_POWF
6337 || fcode == BUILT_IN_POWL)
6338 {
6339 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6340 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6341 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
6342 tree neg11 = fold (build1 (NEGATE_EXPR, type, arg11));
6343 tree arglist = tree_cons (NULL_TREE, arg10,
6344 build_tree_list (NULL_TREE, neg11));
6345 arg1 = build_function_call_expr (powfn, arglist);
6346 return fold (build (MULT_EXPR, type, arg0, arg1));
6347 }
6348 }
6349
6350 if (flag_unsafe_math_optimizations)
6351 {
6352 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6353 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6354
6355 /* Optimize sin(x)/cos(x) as tan(x). */
6356 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
6357 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
6358 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
6359 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6360 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6361 {
6362 tree tanfn;
6363
6364 if (fcode0 == BUILT_IN_SIN)
6365 tanfn = implicit_built_in_decls[BUILT_IN_TAN];
6366 else if (fcode0 == BUILT_IN_SINF)
6367 tanfn = implicit_built_in_decls[BUILT_IN_TANF];
6368 else if (fcode0 == BUILT_IN_SINL)
6369 tanfn = implicit_built_in_decls[BUILT_IN_TANL];
6370 else
6371 tanfn = NULL_TREE;
6372
6373 if (tanfn != NULL_TREE)
6374 return build_function_call_expr (tanfn,
6375 TREE_OPERAND (arg0, 1));
6376 }
6377
6378 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
6379 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
6380 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
6381 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
6382 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6383 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6384 {
6385 tree tanfn;
6386
6387 if (fcode0 == BUILT_IN_COS)
6388 tanfn = implicit_built_in_decls[BUILT_IN_TAN];
6389 else if (fcode0 == BUILT_IN_COSF)
6390 tanfn = implicit_built_in_decls[BUILT_IN_TANF];
6391 else if (fcode0 == BUILT_IN_COSL)
6392 tanfn = implicit_built_in_decls[BUILT_IN_TANL];
6393 else
6394 tanfn = NULL_TREE;
6395
6396 if (tanfn != NULL_TREE)
6397 {
6398 tree tmp = TREE_OPERAND (arg0, 1);
6399 tmp = build_function_call_expr (tanfn, tmp);
6400 return fold (build (RDIV_EXPR, type,
6401 build_real (type, dconst1),
6402 tmp));
6403 }
6404 }
6405
6406 /* Optimize pow(x,c)/x as pow(x,c-1). */
6407 if (fcode0 == BUILT_IN_POW
6408 || fcode0 == BUILT_IN_POWF
6409 || fcode0 == BUILT_IN_POWL)
6410 {
6411 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6412 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
6413 if (TREE_CODE (arg01) == REAL_CST
6414 && ! TREE_CONSTANT_OVERFLOW (arg01)
6415 && operand_equal_p (arg1, arg00, 0))
6416 {
6417 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6418 REAL_VALUE_TYPE c;
6419 tree arg, arglist;
6420
6421 c = TREE_REAL_CST (arg01);
6422 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
6423 arg = build_real (type, c);
6424 arglist = build_tree_list (NULL_TREE, arg);
6425 arglist = tree_cons (NULL_TREE, arg1, arglist);
6426 return build_function_call_expr (powfn, arglist);
6427 }
6428 }
6429 }
6430 goto binary;
6431
6432 case TRUNC_DIV_EXPR:
6433 case ROUND_DIV_EXPR:
6434 case FLOOR_DIV_EXPR:
6435 case CEIL_DIV_EXPR:
6436 case EXACT_DIV_EXPR:
6437 if (integer_onep (arg1))
6438 return non_lvalue (convert (type, arg0));
6439 if (integer_zerop (arg1))
6440 return t;
6441
6442 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
6443 operation, EXACT_DIV_EXPR.
6444
6445 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
6446 At one time others generated faster code, but it's not clear whether
6447 they still do after the last round of changes to the DIV code in expmed.c.  */
6448 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
6449 && multiple_of_p (type, arg0, arg1))
6450 return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));
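	/* For example, CEIL_DIV_EXPR of i*8 by 4 is known to divide
	   evenly, so it is rewritten as the cheaper EXACT_DIV_EXPR.  */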
6451
6452 if (TREE_CODE (arg1) == INTEGER_CST
6453 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6454 code, NULL_TREE)))
6455 return convert (type, tem);
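	/* extract_muldiv handles cases such as rewriting (x * 8) / 4
	   as x * 2 when it can prove the simplification is safe.  */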
6456
6457 goto binary;
6458
6459 case CEIL_MOD_EXPR:
6460 case FLOOR_MOD_EXPR:
6461 case ROUND_MOD_EXPR:
6462 case TRUNC_MOD_EXPR:
6463 if (integer_onep (arg1))
6464 return omit_one_operand (type, integer_zero_node, arg0);
6465 if (integer_zerop (arg1))
6466 return t;
6467
6468 if (TREE_CODE (arg1) == INTEGER_CST
6469 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6470 code, NULL_TREE)))
6471 return convert (type, tem);
6472
6473 goto binary;
6474
6475 case LROTATE_EXPR:
6476 case RROTATE_EXPR:
6477 if (integer_all_onesp (arg0))
6478 return omit_one_operand (type, arg0, arg1);
6479 goto shift;
6480
6481 case RSHIFT_EXPR:
6482 /* Optimize -1 >> x for arithmetic right shifts. */
6483 if (integer_all_onesp (arg0) && ! TREE_UNSIGNED (type))
6484 return omit_one_operand (type, arg0, arg1);
6485 /* ... fall through ... */
6486
6487 case LSHIFT_EXPR:
6488 shift:
6489 if (integer_zerop (arg1))
6490 return non_lvalue (convert (type, arg0));
6491 if (integer_zerop (arg0))
6492 return omit_one_operand (type, arg0, arg1);
6493
6494 /* Since a negative shift count is not well-defined,
6495 don't try to compute it in the compiler. */
6496 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
6497 return t;
6498 /* Rewrite an LROTATE_EXPR by a constant into an
6499 RROTATE_EXPR by a new constant. */
6500 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
6501 {
6502 if (t == orig_t)
6503 t = copy_node (t);
6504 TREE_SET_CODE (t, RROTATE_EXPR);
6505 code = RROTATE_EXPR;
6506 TREE_OPERAND (t, 1) = arg1
6507 = const_binop
6508 (MINUS_EXPR,
6509 convert (TREE_TYPE (arg1),
6510 build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0)),
6511 arg1, 0);
6512 if (tree_int_cst_sgn (arg1) < 0)
6513 return t;
6514 }
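	  /* For example, on a 32-bit type an LROTATE_EXPR by 3 is
	     canonicalized here into an RROTATE_EXPR by 29, so later
	     passes only ever see one rotate direction.  */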
6515
6516 /* If we have a rotate of a bit operation with the rotate count and
6517 the second operand of the bit operation both constant,
6518 permute the two operations. */
6519 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6520 && (TREE_CODE (arg0) == BIT_AND_EXPR
6521 || TREE_CODE (arg0) == BIT_ANDTC_EXPR
6522 || TREE_CODE (arg0) == BIT_IOR_EXPR
6523 || TREE_CODE (arg0) == BIT_XOR_EXPR)
6524 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
6525 return fold (build (TREE_CODE (arg0), type,
6526 fold (build (code, type,
6527 TREE_OPERAND (arg0, 0), arg1)),
6528 fold (build (code, type,
6529 TREE_OPERAND (arg0, 1), arg1))));
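	/* For example, with 32-bit x, (x & 0xff00) rotated right by 8
	   becomes (x rotated right by 8) & 0xff: the constant mask is
	   rotated at compile time.  */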
6530
6531 /* Two consecutive rotates adding up to the width of the mode can
6532 be ignored. */
6533 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6534 && TREE_CODE (arg0) == RROTATE_EXPR
6535 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6536 && TREE_INT_CST_HIGH (arg1) == 0
6537 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
6538 && ((TREE_INT_CST_LOW (arg1)
6539 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
6540 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
6541 return TREE_OPERAND (arg0, 0);
6542
6543 goto binary;
6544
6545 case MIN_EXPR:
6546 if (operand_equal_p (arg0, arg1, 0))
6547 return omit_one_operand (type, arg0, arg1);
6548 if (INTEGRAL_TYPE_P (type)
6549 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
6550 return omit_one_operand (type, arg1, arg0);
6551 goto associate;
6552
6553 case MAX_EXPR:
6554 if (operand_equal_p (arg0, arg1, 0))
6555 return omit_one_operand (type, arg0, arg1);
6556 if (INTEGRAL_TYPE_P (type)
6557 && TYPE_MAX_VALUE (type)
6558 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
6559 return omit_one_operand (type, arg1, arg0);
6560 goto associate;
6561
6562 case TRUTH_NOT_EXPR:
6563 /* Note that the operand of this must be an int
6564 and its values must be 0 or 1.
6565 ("true" is a fixed value perhaps depending on the language,
6566 but we don't handle values other than 1 correctly yet.) */
6567 tem = invert_truthvalue (arg0);
6568 /* Avoid infinite recursion. */
6569 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
6570 {
6571 tem = fold_single_bit_test (code, arg0, arg1, type);
6572 if (tem)
6573 return tem;
6574 return t;
6575 }
6576 return convert (type, tem);
6577
6578 case TRUTH_ANDIF_EXPR:
6579 /* Note that the operands of this must be ints
6580 and their values must be 0 or 1.
6581 ("true" is a fixed value perhaps depending on the language.) */
6582 /* If first arg is constant zero, return it. */
6583 if (integer_zerop (arg0))
6584 return convert (type, arg0);
6585 case TRUTH_AND_EXPR:
6586 /* If either arg is constant true, drop it. */
6587 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6588 return non_lvalue (convert (type, arg1));
6589 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
6590 /* Preserve sequence points. */
6591 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
6592 return non_lvalue (convert (type, arg0));
6593 /* If second arg is constant zero, result is zero, but first arg
6594 must be evaluated. */
6595 if (integer_zerop (arg1))
6596 return omit_one_operand (type, arg1, arg0);
6597 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
6598 case will be handled here. */
6599 if (integer_zerop (arg0))
6600 return omit_one_operand (type, arg0, arg1);
6601
6602 truth_andor:
6603 /* We only do these simplifications if we are optimizing. */
6604 if (!optimize)
6605 return t;
6606
6607 /* Check for things like (A || B) && (A || C). We can convert this
6608 to A || (B && C). Note that either operator can be any of the four
6609 truth and/or operations and the transformation will still be
6610 valid. Also note that we only care about order for the
6611 ANDIF and ORIF operators. If B contains side effects, this
6612 might change the truth-value of A. */
6613 if (TREE_CODE (arg0) == TREE_CODE (arg1)
6614 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
6615 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
6616 || TREE_CODE (arg0) == TRUTH_AND_EXPR
6617 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
6618 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
6619 {
6620 tree a00 = TREE_OPERAND (arg0, 0);
6621 tree a01 = TREE_OPERAND (arg0, 1);
6622 tree a10 = TREE_OPERAND (arg1, 0);
6623 tree a11 = TREE_OPERAND (arg1, 1);
6624 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
6625 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
6626 && (code == TRUTH_AND_EXPR
6627 || code == TRUTH_OR_EXPR));
6628
6629 if (operand_equal_p (a00, a10, 0))
6630 return fold (build (TREE_CODE (arg0), type, a00,
6631 fold (build (code, type, a01, a11))));
6632 else if (commutative && operand_equal_p (a00, a11, 0))
6633 return fold (build (TREE_CODE (arg0), type, a00,
6634 fold (build (code, type, a01, a10))));
6635 else if (commutative && operand_equal_p (a01, a10, 0))
6636 return fold (build (TREE_CODE (arg0), type, a01,
6637 fold (build (code, type, a00, a11))));
6638
6639 /* This case is tricky because we must either have commutative
6640 operators or else A10 must not have side-effects. */
6641
6642 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
6643 && operand_equal_p (a01, a11, 0))
6644 return fold (build (TREE_CODE (arg0), type,
6645 fold (build (code, type, a00, a10)),
6646 a01));
6647 }
6648
6649 /* See if we can build a range comparison. */
6650 if (0 != (tem = fold_range_test (t)))
6651 return tem;
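      /* For example, fold_range_test can turn c >= '0' && c <= '9' into
	 the single unsigned range check (unsigned) (c - '0') <= 9.  */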
6652
6653 /* Check for the possibility of merging component references. If our
6654 lhs is another similar operation, try to merge its rhs with our
6655 rhs. Then try to merge our lhs and rhs. */
6656 if (TREE_CODE (arg0) == code
6657 && 0 != (tem = fold_truthop (code, type,
6658 TREE_OPERAND (arg0, 1), arg1)))
6659 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6660
6661 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
6662 return tem;
6663
6664 return t;
6665
6666 case TRUTH_ORIF_EXPR:
6667 /* Note that the operands of this must be ints
6668 and their values must be 0 or true.
6669 ("true" is a fixed value perhaps depending on the language.) */
6670 /* If first arg is constant true, return it. */
6671 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6672 return convert (type, arg0);
6673 case TRUTH_OR_EXPR:
6674 /* If either arg is constant zero, drop it. */
6675 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
6676 return non_lvalue (convert (type, arg1));
6677 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
6678 /* Preserve sequence points. */
6679 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
6680 return non_lvalue (convert (type, arg0));
6681 /* If second arg is constant true, result is true, but we must
6682 evaluate first arg. */
6683 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
6684 return omit_one_operand (type, arg1, arg0);
6685 /* Likewise for first arg, but note this only occurs here for
6686 TRUTH_OR_EXPR. */
6687 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6688 return omit_one_operand (type, arg0, arg1);
6689 goto truth_andor;
6690
6691 case TRUTH_XOR_EXPR:
6692 /* If either arg is constant zero, drop it. */
6693 if (integer_zerop (arg0))
6694 return non_lvalue (convert (type, arg1));
6695 if (integer_zerop (arg1))
6696 return non_lvalue (convert (type, arg0));
6697 /* If either arg is constant true, this is a logical inversion. */
6698 if (integer_onep (arg0))
6699 return non_lvalue (convert (type, invert_truthvalue (arg1)));
6700 if (integer_onep (arg1))
6701 return non_lvalue (convert (type, invert_truthvalue (arg0)));
6702 return t;
6703
6704 case EQ_EXPR:
6705 case NE_EXPR:
6706 case LT_EXPR:
6707 case GT_EXPR:
6708 case LE_EXPR:
6709 case GE_EXPR:
6710 /* If one arg is a real or integer constant, put it last. */
6711 if ((TREE_CODE (arg0) == INTEGER_CST
6712 && TREE_CODE (arg1) != INTEGER_CST)
6713 || (TREE_CODE (arg0) == REAL_CST
6714 && TREE_CODE (arg1) != REAL_CST))
6715 {
6716 if (t == orig_t)
6717 t = copy_node (t);
6718 TREE_OPERAND (t, 0) = arg1;
6719 TREE_OPERAND (t, 1) = arg0;
6720 arg0 = TREE_OPERAND (t, 0);
6721 arg1 = TREE_OPERAND (t, 1);
6722 code = swap_tree_comparison (code);
6723 TREE_SET_CODE (t, code);
6724 }
6725
6726 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
6727 {
6728 tree targ0 = strip_float_extensions (arg0);
6729 tree targ1 = strip_float_extensions (arg1);
6730 tree newtype = TREE_TYPE (targ0);
6731
6732 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
6733 newtype = TREE_TYPE (targ1);
6734
6735 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
6736 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
6737 return fold (build (code, type, convert (newtype, targ0),
6738 convert (newtype, targ1)));
6739
6740 /* (-a) CMP (-b) -> b CMP a */
6741 if (TREE_CODE (arg0) == NEGATE_EXPR
6742 && TREE_CODE (arg1) == NEGATE_EXPR)
6743 return fold (build (code, type, TREE_OPERAND (arg1, 0),
6744 TREE_OPERAND (arg0, 0)));
6745
6746 if (TREE_CODE (arg1) == REAL_CST)
6747 {
6748 REAL_VALUE_TYPE cst;
6749 cst = TREE_REAL_CST (arg1);
6750
6751 /* (-a) CMP CST -> a swap(CMP) (-CST) */
6752 if (TREE_CODE (arg0) == NEGATE_EXPR)
6753 return
6754 fold (build (swap_tree_comparison (code), type,
6755 TREE_OPERAND (arg0, 0),
6756 build_real (TREE_TYPE (arg1),
6757 REAL_VALUE_NEGATE (cst))));
6758
6759 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
6760 /* a CMP (-0) -> a CMP 0 */
6761 if (REAL_VALUE_MINUS_ZERO (cst))
6762 return fold (build (code, type, arg0,
6763 build_real (TREE_TYPE (arg1), dconst0)));
6764
6765 /* x != NaN is always true, other ops are always false. */
6766 if (REAL_VALUE_ISNAN (cst)
6767 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
6768 {
6769 t = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
6770 return omit_one_operand (type, convert (type, t), arg0);
6771 }
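	      /* Illustrative example (hypothetical code): "x < __builtin_nan ("")"
		 folds to 0 and "x != __builtin_nan ("")" folds to 1, while any
		 side effects of x are preserved via omit_one_operand.  */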
6772
6773 /* Fold comparisons against infinity. */
6774 if (REAL_VALUE_ISINF (cst))
6775 {
6776 tem = fold_inf_compare (code, type, arg0, arg1);
6777 if (tem != NULL_TREE)
6778 return tem;
6779 }
6780 }
6781
6782 /* If this is a comparison of a real constant with a PLUS_EXPR
6783 or a MINUS_EXPR of a real constant, we can convert it into a
6784 comparison with a revised real constant, provided that
6785 unsafe_math_optimizations are enabled and no overflow occurs. */
6786 if (flag_unsafe_math_optimizations
6787 && TREE_CODE (arg1) == REAL_CST
6788 && (TREE_CODE (arg0) == PLUS_EXPR
6789 || TREE_CODE (arg0) == MINUS_EXPR)
6790 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6791 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
6792 ? MINUS_EXPR : PLUS_EXPR,
6793 arg1, TREE_OPERAND (arg0, 1), 0))
6794 && ! TREE_CONSTANT_OVERFLOW (tem))
6795 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
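	  /* For instance (illustrative only): with -funsafe-math-optimizations,
	     "x + 2.0 < 7.0" is rewritten as "x < 5.0", since 7.0 - 2.0 folds
	     to a constant without overflow.  */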
6796
6797 /* Likewise, we can simplify a comparison of a real constant with
6798 a MINUS_EXPR whose first operand is also a real constant, i.e.
6799 (c1 - x) < c2 becomes x > c1-c2. */
6800 if (flag_unsafe_math_optimizations
6801 && TREE_CODE (arg1) == REAL_CST
6802 && TREE_CODE (arg0) == MINUS_EXPR
6803 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
6804 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
6805 arg1, 0))
6806 && ! TREE_CONSTANT_OVERFLOW (tem))
6807 return fold (build (swap_tree_comparison (code), type,
6808 TREE_OPERAND (arg0, 1), tem));
6809
6810 /* Fold comparisons against built-in math functions. */
6811 if (TREE_CODE (arg1) == REAL_CST
6812 && flag_unsafe_math_optimizations
6813 && ! flag_errno_math)
6814 {
6815 enum built_in_function fcode = builtin_mathfn_code (arg0);
6816
6817 if (fcode != END_BUILTINS)
6818 {
6819 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
6820 if (tem != NULL_TREE)
6821 return tem;
6822 }
6823 }
6824 }
6825
6826 /* Convert foo++ == CONST into ++foo == CONST + INCR.
6827 First, see if one arg is constant; find the constant arg
6828 and the other one. */
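      /* Illustrative example (hypothetical code): for integral i,
	 "i++ == 5" is rewritten as "++i == 6".  The transformation is
	 restricted to EQ/NE below because ordered comparisons could be
	 broken by overflow or floating-point rounding; pointers are
	 assumed not to overflow.  */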
6829 {
6830 tree constop = 0, varop = NULL_TREE;
6831 int constopnum = -1;
6832
6833 if (TREE_CONSTANT (arg1))
6834 constopnum = 1, constop = arg1, varop = arg0;
6835 if (TREE_CONSTANT (arg0))
6836 constopnum = 0, constop = arg0, varop = arg1;
6837
6838 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
6839 {
6840 /* This optimization is invalid for ordered comparisons
6841 if CONST+INCR overflows or if foo+incr might overflow.
6842 This optimization is invalid for floating point due to rounding.
6843 For pointer types we assume overflow doesn't happen. */
6844 if (POINTER_TYPE_P (TREE_TYPE (varop))
6845 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
6846 && (code == EQ_EXPR || code == NE_EXPR)))
6847 {
6848 tree newconst
6849 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
6850 constop, TREE_OPERAND (varop, 1)));
6851
6852 /* Do not overwrite the current varop to be a preincrement;
6853 create a new node so that we won't confuse our caller who
6854 might create trees and throw them away, reusing the
6855 arguments that they passed to build. This shows up in
6856 the THEN or ELSE parts of ?: being postincrements. */
6857 varop = build (PREINCREMENT_EXPR, TREE_TYPE (varop),
6858 TREE_OPERAND (varop, 0),
6859 TREE_OPERAND (varop, 1));
6860
6861 /* If VAROP is a reference to a bitfield, we must mask
6862 the constant by the width of the field. */
6863 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
6864 && DECL_BIT_FIELD(TREE_OPERAND
6865 (TREE_OPERAND (varop, 0), 1)))
6866 {
6867 int size
6868 = TREE_INT_CST_LOW (DECL_SIZE
6869 (TREE_OPERAND
6870 (TREE_OPERAND (varop, 0), 1)));
6871 tree mask, unsigned_type;
6872 unsigned int precision;
6873 tree folded_compare;
6874
6875 /* First check whether the comparison would come out
6876 always the same. If we don't do that we would
6877 change the meaning with the masking. */
6878 if (constopnum == 0)
6879 folded_compare = fold (build (code, type, constop,
6880 TREE_OPERAND (varop, 0)));
6881 else
6882 folded_compare = fold (build (code, type,
6883 TREE_OPERAND (varop, 0),
6884 constop));
6885 if (integer_zerop (folded_compare)
6886 || integer_onep (folded_compare))
6887 return omit_one_operand (type, folded_compare, varop);
6888
6889 unsigned_type = (*lang_hooks.types.type_for_size)(size, 1);
6890 precision = TYPE_PRECISION (unsigned_type);
6891 mask = build_int_2 (~0, ~0);
6892 TREE_TYPE (mask) = unsigned_type;
6893 force_fit_type (mask, 0);
6894 mask = const_binop (RSHIFT_EXPR, mask,
6895 size_int (precision - size), 0);
6896 newconst = fold (build (BIT_AND_EXPR,
6897 TREE_TYPE (varop), newconst,
6898 convert (TREE_TYPE (varop),
6899 mask)));
6900 }
6901
6902 t = build (code, type,
6903 (constopnum == 0) ? newconst : varop,
6904 (constopnum == 1) ? newconst : varop);
6905 return t;
6906 }
6907 }
6908 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
6909 {
6910 if (POINTER_TYPE_P (TREE_TYPE (varop))
6911 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
6912 && (code == EQ_EXPR || code == NE_EXPR)))
6913 {
6914 tree newconst
6915 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
6916 constop, TREE_OPERAND (varop, 1)));
6917
6918 /* Do not overwrite the current varop to be a predecrement;
6919 create a new node so that we won't confuse our caller who
6920 might create trees and throw them away, reusing the
6921 arguments that they passed to build. This shows up in
6922 the THEN or ELSE parts of ?: being postdecrements. */
6923 varop = build (PREDECREMENT_EXPR, TREE_TYPE (varop),
6924 TREE_OPERAND (varop, 0),
6925 TREE_OPERAND (varop, 1));
6926
6927 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
6928 && DECL_BIT_FIELD(TREE_OPERAND
6929 (TREE_OPERAND (varop, 0), 1)))
6930 {
6931 int size
6932 = TREE_INT_CST_LOW (DECL_SIZE
6933 (TREE_OPERAND
6934 (TREE_OPERAND (varop, 0), 1)));
6935 tree mask, unsigned_type;
6936 unsigned int precision;
6937 tree folded_compare;
6938
6939 if (constopnum == 0)
6940 folded_compare = fold (build (code, type, constop,
6941 TREE_OPERAND (varop, 0)));
6942 else
6943 folded_compare = fold (build (code, type,
6944 TREE_OPERAND (varop, 0),
6945 constop));
6946 if (integer_zerop (folded_compare)
6947 || integer_onep (folded_compare))
6948 return omit_one_operand (type, folded_compare, varop);
6949
6950 unsigned_type = (*lang_hooks.types.type_for_size)(size, 1);
6951 precision = TYPE_PRECISION (unsigned_type);
6952 mask = build_int_2 (~0, ~0);
6953 TREE_TYPE (mask) = unsigned_type;
6954 force_fit_type (mask, 0);
6955 mask = const_binop (RSHIFT_EXPR, mask,
6956 size_int (precision - size), 0);
6957 newconst = fold (build (BIT_AND_EXPR,
6958 TREE_TYPE (varop), newconst,
6959 convert (TREE_TYPE (varop),
6960 mask)));
6961 }
6962
6963 t = build (code, type,
6964 (constopnum == 0) ? newconst : varop,
6965 (constopnum == 1) ? newconst : varop);
6966 return t;
6967 }
6968 }
6969 }
6970
6971 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
6972 This transformation affects the cases which are handled in later
6973 optimizations involving comparisons with non-negative constants. */
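      /* E.g. (illustrative) "x >= 1" becomes "x > 0" and "x < 1"
	 becomes "x <= 0", exposing the comparisons with zero that the
	 later transformations look for.  */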
6974 if (TREE_CODE (arg1) == INTEGER_CST
6975 && TREE_CODE (arg0) != INTEGER_CST
6976 && tree_int_cst_sgn (arg1) > 0)
6977 {
6978 switch (code)
6979 {
6980 case GE_EXPR:
6981 code = GT_EXPR;
6982 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6983 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6984 break;
6985
6986 case LT_EXPR:
6987 code = LE_EXPR;
6988 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6989 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6990 break;
6991
6992 default:
6993 break;
6994 }
6995 }
6996
6997 /* Comparisons with the highest or lowest possible integer of
6998 the specified size will have known values. */
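      /* E.g. (illustrative, assuming an 8-bit unsigned type): "x > 255"
	 folds to constant false and "x <= 255" to constant true, while
	 "x >= 255" is rewritten as "x == 255".  */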
6999 {
7000 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
7001
7002 if (TREE_CODE (arg1) == INTEGER_CST
7003 && ! TREE_CONSTANT_OVERFLOW (arg1)
7004 && width <= HOST_BITS_PER_WIDE_INT
7005 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
7006 || POINTER_TYPE_P (TREE_TYPE (arg1))))
7007 {
7008 unsigned HOST_WIDE_INT signed_max;
7009 unsigned HOST_WIDE_INT max, min;
7010
7011 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
7012
7013 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7014 {
7015 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
7016 min = 0;
7017 }
7018 else
7019 {
7020 max = signed_max;
7021 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
7022 }
7023
7024 if (TREE_INT_CST_HIGH (arg1) == 0
7025 && TREE_INT_CST_LOW (arg1) == max)
7026 switch (code)
7027 {
7028 case GT_EXPR:
7029 return omit_one_operand (type,
7030 convert (type, integer_zero_node),
7031 arg0);
7032 case GE_EXPR:
7033 code = EQ_EXPR;
7034 if (t == orig_t)
7035 t = copy_node (t);
7036 TREE_SET_CODE (t, EQ_EXPR);
7037 break;
7038 case LE_EXPR:
7039 return omit_one_operand (type,
7040 convert (type, integer_one_node),
7041 arg0);
7042 case LT_EXPR:
7043 code = NE_EXPR;
7044 if (t == orig_t)
7045 t = copy_node (t);
7046 TREE_SET_CODE (t, NE_EXPR);
7047 break;
7048
7049 /* The GE_EXPR and LT_EXPR cases above are not normally
7050 reached because of previous transformations. */
7051
7052 default:
7053 break;
7054 }
7055 else if (TREE_INT_CST_HIGH (arg1) == 0
7056 && TREE_INT_CST_LOW (arg1) == max - 1)
7057 switch (code)
7058 {
7059 case GT_EXPR:
7060 code = EQ_EXPR;
7061 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7062 t = build (code, type, TREE_OPERAND (t, 0), arg1);
7063 break;
7064 case LE_EXPR:
7065 code = NE_EXPR;
7066 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7067 t = build (code, type, TREE_OPERAND (t, 0), arg1);
7068 break;
7069 default:
7070 break;
7071 }
7072 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7073 && TREE_INT_CST_LOW (arg1) == min)
7074 switch (code)
7075 {
7076 case LT_EXPR:
7077 return omit_one_operand (type,
7078 convert (type, integer_zero_node),
7079 arg0);
7080 case LE_EXPR:
7081 code = EQ_EXPR;
7082 if (t == orig_t)
7083 t = copy_node (t);
7084 TREE_SET_CODE (t, EQ_EXPR);
7085 break;
7086
7087 case GE_EXPR:
7088 return omit_one_operand (type,
7089 convert (type, integer_one_node),
7090 arg0);
7091 case GT_EXPR:
7092 code = NE_EXPR;
7093 if (t == orig_t)
7094 t = copy_node (t);
7095 TREE_SET_CODE (t, NE_EXPR);
7096 break;
7097
7098 default:
7099 break;
7100 }
7101 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7102 && TREE_INT_CST_LOW (arg1) == min + 1)
7103 switch (code)
7104 {
7105 case GE_EXPR:
7106 code = NE_EXPR;
7107 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7108 t = build (code, type, TREE_OPERAND (t, 0), arg1);
7109 break;
7110 case LT_EXPR:
7111 code = EQ_EXPR;
7112 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7113 t = build (code, type, TREE_OPERAND (t, 0), arg1);
7114 break;
7115 default:
7116 break;
7117 }
7118
7119 else if (TREE_INT_CST_HIGH (arg1) == 0
7120 && TREE_INT_CST_LOW (arg1) == signed_max
7121 && TREE_UNSIGNED (TREE_TYPE (arg1))
7122 /* signed_type does not work on pointer types. */
7123 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
7124 {
7125 /* The following case also applies to X < signed_max+1
7126 and X >= signed_max+1 because of previous transformations. */
7127 if (code == LE_EXPR || code == GT_EXPR)
7128 {
7129 tree st0, st1;
7130 st0 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg0));
7131 st1 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg1));
7132 return fold
7133 (build (code == LE_EXPR ? GE_EXPR: LT_EXPR,
7134 type, convert (st0, arg0),
7135 convert (st1, integer_zero_node)));
7136 }
7137 }
7138 }
7139 }
7140
7141 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
7142 a MINUS_EXPR of a constant, we can convert it into a comparison with
7143 a revised constant as long as no overflow occurs. */
7144 if ((code == EQ_EXPR || code == NE_EXPR)
7145 && TREE_CODE (arg1) == INTEGER_CST
7146 && (TREE_CODE (arg0) == PLUS_EXPR
7147 || TREE_CODE (arg0) == MINUS_EXPR)
7148 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7149 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7150 ? MINUS_EXPR : PLUS_EXPR,
7151 arg1, TREE_OPERAND (arg0, 1), 0))
7152 && ! TREE_CONSTANT_OVERFLOW (tem))
7153 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7154
7155 /* Similarly for a NEGATE_EXPR. */
7156 else if ((code == EQ_EXPR || code == NE_EXPR)
7157 && TREE_CODE (arg0) == NEGATE_EXPR
7158 && TREE_CODE (arg1) == INTEGER_CST
7159 && 0 != (tem = negate_expr (arg1))
7160 && TREE_CODE (tem) == INTEGER_CST
7161 && ! TREE_CONSTANT_OVERFLOW (tem))
7162 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7163
7164 /* If we have X - Y == 0, we can convert that to X == Y and similarly
7165 for !=. Don't do this for ordered comparisons due to overflow. */
7166 else if ((code == NE_EXPR || code == EQ_EXPR)
7167 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
7168 return fold (build (code, type,
7169 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
7170
7171 /* If we are widening one operand of an integer comparison,
7172 see if the other operand is similarly being widened. Perhaps we
7173 can do the comparison in the narrower type. */
7174 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
7175 && TREE_CODE (arg0) == NOP_EXPR
7176 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
7177 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
7178 && (TREE_TYPE (t1) == TREE_TYPE (tem)
7179 || (TREE_CODE (t1) == INTEGER_CST
7180 && int_fits_type_p (t1, TREE_TYPE (tem)))))
7181 return fold (build (code, type, tem, convert (TREE_TYPE (tem), t1)));
7182
7183 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
7184 constant, we can simplify it. */
7185 else if (TREE_CODE (arg1) == INTEGER_CST
7186 && (TREE_CODE (arg0) == MIN_EXPR
7187 || TREE_CODE (arg0) == MAX_EXPR)
7188 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7189 return optimize_minmax_comparison (t);
7190
7191 /* If we are comparing an ABS_EXPR with a constant, we can
7192 convert all the cases into explicit comparisons, but they may
7193 well not be faster than doing the ABS and one comparison.
7194 But ABS (X) <= C is a range comparison, which becomes a subtraction
7195 and a comparison, and is probably faster. */
7196 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7197 && TREE_CODE (arg0) == ABS_EXPR
7198 && ! TREE_SIDE_EFFECTS (arg0)
7199 && (0 != (tem = negate_expr (arg1)))
7200 && TREE_CODE (tem) == INTEGER_CST
7201 && ! TREE_CONSTANT_OVERFLOW (tem))
7202 return fold (build (TRUTH_ANDIF_EXPR, type,
7203 build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
7204 build (LE_EXPR, type,
7205 TREE_OPERAND (arg0, 0), arg1)));
7206
7207 /* If this is an EQ or NE comparison with zero and ARG0 is
7208 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
7209 two operations, but the latter can be done in one less insn
7210 on machines that have only two-operand insns or on which a
7211 constant cannot be the first operand. */
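      /* Illustrative example (hypothetical code): "((1 << n) & flags) != 0"
	 is rewritten as "((flags >> n) & 1) != 0", which avoids a separate
	 constant load on two-operand machines.  */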
7212 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
7213 && TREE_CODE (arg0) == BIT_AND_EXPR)
7214 {
7215 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
7216 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
7217 return
7218 fold (build (code, type,
7219 build (BIT_AND_EXPR, TREE_TYPE (arg0),
7220 build (RSHIFT_EXPR,
7221 TREE_TYPE (TREE_OPERAND (arg0, 0)),
7222 TREE_OPERAND (arg0, 1),
7223 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
7224 convert (TREE_TYPE (arg0),
7225 integer_one_node)),
7226 arg1));
7227 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
7228 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
7229 return
7230 fold (build (code, type,
7231 build (BIT_AND_EXPR, TREE_TYPE (arg0),
7232 build (RSHIFT_EXPR,
7233 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7234 TREE_OPERAND (arg0, 0),
7235 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
7236 convert (TREE_TYPE (arg0),
7237 integer_one_node)),
7238 arg1));
7239 }
7240
7241 /* If this is an NE or EQ comparison of zero against the result of a
7242 signed MOD operation whose second operand is a power of 2, make
7243 the MOD operation unsigned since it is simpler and equivalent. */
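      /* E.g. (illustrative) for signed int x, "x % 4 == 0" is rewritten
	 to perform the modulus in the corresponding unsigned type, which
	 is equivalent for EQ/NE against zero and simpler to expand.  */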
7244 if ((code == NE_EXPR || code == EQ_EXPR)
7245 && integer_zerop (arg1)
7246 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
7247 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
7248 || TREE_CODE (arg0) == CEIL_MOD_EXPR
7249 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
7250 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
7251 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7252 {
7253 tree newtype = (*lang_hooks.types.unsigned_type) (TREE_TYPE (arg0));
7254 tree newmod = build (TREE_CODE (arg0), newtype,
7255 convert (newtype, TREE_OPERAND (arg0, 0)),
7256 convert (newtype, TREE_OPERAND (arg0, 1)));
7257
7258 return build (code, type, newmod, convert (newtype, arg1));
7259 }
7260
7261 /* If this is an NE comparison of zero with an AND of one, remove the
7262 comparison since the AND will give the correct value. */
7263 if (code == NE_EXPR && integer_zerop (arg1)
7264 && TREE_CODE (arg0) == BIT_AND_EXPR
7265 && integer_onep (TREE_OPERAND (arg0, 1)))
7266 return convert (type, arg0);
7267
7268 /* If we have (A & C) == C where C is a power of 2, convert this into
7269 (A & C) != 0. Similarly for NE_EXPR. */
7270 if ((code == EQ_EXPR || code == NE_EXPR)
7271 && TREE_CODE (arg0) == BIT_AND_EXPR
7272 && integer_pow2p (TREE_OPERAND (arg0, 1))
7273 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
7274 return fold (build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
7275 arg0, integer_zero_node));
7276
7277 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
7278 2, then fold the expression into shifts and logical operations. */
7279 tem = fold_single_bit_test (code, arg0, arg1, type);
7280 if (tem)
7281 return tem;
7282
7283 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
7284 Similarly for NE_EXPR. */
7285 if ((code == EQ_EXPR || code == NE_EXPR)
7286 && TREE_CODE (arg0) == BIT_AND_EXPR
7287 && TREE_CODE (arg1) == INTEGER_CST
7288 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7289 {
7290 tree dandnotc = fold (build (BIT_ANDTC_EXPR, TREE_TYPE (arg0),
7291 arg1, TREE_OPERAND (arg0, 1)));
7292 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7293 if (!integer_zerop (dandnotc))
7294 return omit_one_operand (type, rslt, arg0);
7295 }
7296
7297 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
7298 Similarly for NE_EXPR. */
7299 if ((code == EQ_EXPR || code == NE_EXPR)
7300 && TREE_CODE (arg0) == BIT_IOR_EXPR
7301 && TREE_CODE (arg1) == INTEGER_CST
7302 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7303 {
7304 tree candnotd = fold (build (BIT_ANDTC_EXPR, TREE_TYPE (arg0),
7305 TREE_OPERAND (arg0, 1), arg1));
7306 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7307 if (!integer_zerop (candnotd))
7308 return omit_one_operand (type, rslt, arg0);
7309 }
7310
7311 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
7312 and similarly for >= into !=. */
7313 if ((code == LT_EXPR || code == GE_EXPR)
7314 && TREE_UNSIGNED (TREE_TYPE (arg0))
7315 && TREE_CODE (arg1) == LSHIFT_EXPR
7316 && integer_onep (TREE_OPERAND (arg1, 0)))
7317 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7318 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7319 TREE_OPERAND (arg1, 1)),
7320 convert (TREE_TYPE (arg0), integer_zero_node));
7321
7322 else if ((code == LT_EXPR || code == GE_EXPR)
7323 && TREE_UNSIGNED (TREE_TYPE (arg0))
7324 && (TREE_CODE (arg1) == NOP_EXPR
7325 || TREE_CODE (arg1) == CONVERT_EXPR)
7326 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
7327 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
7328 return
7329 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7330 convert (TREE_TYPE (arg0),
7331 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7332 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
7333 convert (TREE_TYPE (arg0), integer_zero_node));
7334
7335 /* Simplify comparison of something with itself. (For IEEE
7336 floating-point, we can only do some of these simplifications.) */
7337 if (operand_equal_p (arg0, arg1, 0))
7338 {
7339 switch (code)
7340 {
7341 case EQ_EXPR:
7342 case GE_EXPR:
7343 case LE_EXPR:
7344 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7345 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7346 return constant_boolean_node (1, type);
7347 code = EQ_EXPR;
7348 if (t == orig_t)
7349 t = copy_node (t);
7350 TREE_SET_CODE (t, code);
7351 break;
7352
7353 case NE_EXPR:
7354 /* For NE, we can only do this simplification for integer types
7355 or when we don't honor IEEE floating point NaNs. */
7356 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
7357 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7358 break;
7359 /* ... fall through ... */
7360 case GT_EXPR:
7361 case LT_EXPR:
7362 return constant_boolean_node (0, type);
7363 default:
7364 abort ();
7365 }
7366 }
7367
7368 /* If we are comparing an expression that just has comparisons
7369 of two integer values, arithmetic expressions of those comparisons,
7370 and constants, we can simplify it. There are only three cases
7371 to check: the two values can either be equal, the first can be
7372 greater, or the second can be greater. Fold the expression for
7373 those three values. Since each value must be 0 or 1, we have
7374 eight possibilities, each of which corresponds to the constant 0
7375 or 1 or one of the six possible comparisons.
7376
7377 This handles common cases like (a > b) == 0 but also handles
7378 expressions like ((x > y) - (y > x)) > 0, which supposedly
7379 occur in macroized code. */
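      /* Illustrative example (hypothetical code): for
	 "((x > y) - (y > x)) > 0" the three evaluations below yield
	 1, 0 and 0 for x>y, x==y and x<y respectively, so the whole
	 expression folds to "x > y".  */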
7380
7381 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
7382 {
7383 tree cval1 = 0, cval2 = 0;
7384 int save_p = 0;
7385
7386 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
7387 /* Don't handle degenerate cases here; they should already
7388 have been handled anyway. */
7389 && cval1 != 0 && cval2 != 0
7390 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
7391 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
7392 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
7393 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
7394 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
7395 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
7396 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
7397 {
7398 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
7399 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
7400
7401 /* We can't just pass T to eval_subst in case cval1 or cval2
7402 was the same as ARG1. */
7403
7404 tree high_result
7405 = fold (build (code, type,
7406 eval_subst (arg0, cval1, maxval, cval2, minval),
7407 arg1));
7408 tree equal_result
7409 = fold (build (code, type,
7410 eval_subst (arg0, cval1, maxval, cval2, maxval),
7411 arg1));
7412 tree low_result
7413 = fold (build (code, type,
7414 eval_subst (arg0, cval1, minval, cval2, maxval),
7415 arg1));
7416
7417 /* All three of these results should be 0 or 1. Confirm they
7418 are. Then use those values to select the proper code
7419 to use. */
7420
7421 if ((integer_zerop (high_result)
7422 || integer_onep (high_result))
7423 && (integer_zerop (equal_result)
7424 || integer_onep (equal_result))
7425 && (integer_zerop (low_result)
7426 || integer_onep (low_result)))
7427 {
7428 /* Make a 3-bit mask with the high-order bit being the
7429 value for `>', the next for '=', and the low for '<'. */
7430 switch ((integer_onep (high_result) * 4)
7431 + (integer_onep (equal_result) * 2)
7432 + integer_onep (low_result))
7433 {
7434 case 0:
7435 /* Always false. */
7436 return omit_one_operand (type, integer_zero_node, arg0);
7437 case 1:
7438 code = LT_EXPR;
7439 break;
7440 case 2:
7441 code = EQ_EXPR;
7442 break;
7443 case 3:
7444 code = LE_EXPR;
7445 break;
7446 case 4:
7447 code = GT_EXPR;
7448 break;
7449 case 5:
7450 code = NE_EXPR;
7451 break;
7452 case 6:
7453 code = GE_EXPR;
7454 break;
7455 case 7:
7456 /* Always true. */
7457 return omit_one_operand (type, integer_one_node, arg0);
7458 }
7459
7460 t = build (code, type, cval1, cval2);
7461 if (save_p)
7462 return save_expr (t);
7463 else
7464 return fold (t);
7465 }
7466 }
7467 }
7468
7469 /* If this is a comparison of a field, we may be able to simplify it. */
7470 if (((TREE_CODE (arg0) == COMPONENT_REF
7471 && (*lang_hooks.can_use_bit_fields_p) ())
7472 || TREE_CODE (arg0) == BIT_FIELD_REF)
7473 && (code == EQ_EXPR || code == NE_EXPR)
7474 /* Handle the constant case even without -O
7475 to make sure the warnings are given. */
7476 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
7477 {
7478 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
7479 return t1 ? t1 : t;
7480 }
7481
7482 /* If this is a comparison of complex values and either or both sides
7483 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
7484 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
7485 This may prevent needless evaluations. */
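      /* Illustrative example (hypothetical code): comparing a complex
	 value z for equality against the literal "1.0 + 2.0i" becomes
	 "REALPART (z) == 1.0 && IMAGPART (z) == 2.0" (with || of the
	 inequalities for NE), so the parts are compared independently.  */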
7486 if ((code == EQ_EXPR || code == NE_EXPR)
7487 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
7488 && (TREE_CODE (arg0) == COMPLEX_EXPR
7489 || TREE_CODE (arg1) == COMPLEX_EXPR
7490 || TREE_CODE (arg0) == COMPLEX_CST
7491 || TREE_CODE (arg1) == COMPLEX_CST))
7492 {
7493 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
7494 tree real0, imag0, real1, imag1;
7495
7496 arg0 = save_expr (arg0);
7497 arg1 = save_expr (arg1);
7498 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
7499 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
7500 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
7501 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
7502
7503 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
7504 : TRUTH_ORIF_EXPR),
7505 type,
7506 fold (build (code, type, real0, real1)),
7507 fold (build (code, type, imag0, imag1))));
7508 }
7509
7510 /* Optimize comparisons of strlen vs zero to a compare of the
7511 first character of the string vs zero. To wit,
7512 strlen(ptr) == 0 => *ptr == 0
7513 strlen(ptr) != 0 => *ptr != 0
7514 Other cases should reduce to one of these two (or a constant)
7515 due to the return value of strlen being unsigned. */
7516 if ((code == EQ_EXPR || code == NE_EXPR)
7517 && integer_zerop (arg1)
7518 && TREE_CODE (arg0) == CALL_EXPR
7519 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR)
7520 {
7521 tree fndecl = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7522 tree arglist;
7523
7524 if (TREE_CODE (fndecl) == FUNCTION_DECL
7525 && DECL_BUILT_IN (fndecl)
7526 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
7527 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
7528 && (arglist = TREE_OPERAND (arg0, 1))
7529 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
7530 && ! TREE_CHAIN (arglist))
7531 return fold (build (code, type,
7532 build1 (INDIRECT_REF, char_type_node,
7533 TREE_VALUE(arglist)),
7534 integer_zero_node));
7535 }
7536
7537 /* From here on, the only cases we handle are when the result is
7538 known to be a constant.
7539
7540 To compute GT, swap the arguments and do LT.
7541 To compute GE, do LT and invert the result.
7542 To compute LE, swap the arguments, do LT and invert the result.
7543 To compute NE, do EQ and invert the result.
7544
7545 Therefore, the code below must handle only EQ and LT. */
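      /* E.g. (illustrative) "3 > 2" is computed below as "2 < 3", and
	 "3 >= 2" as the inversion of "3 < 2"; only the LT and EQ cases
	 need direct constant evaluation.  */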
7546
7547 if (code == LE_EXPR || code == GT_EXPR)
7548 {
7549 tem = arg0, arg0 = arg1, arg1 = tem;
7550 code = swap_tree_comparison (code);
7551 }
7552
7553 /* Note that it is safe to invert for real values here because we
7554 will check below in the one case that it matters. */
7555
7556 t1 = NULL_TREE;
7557 invert = 0;
7558 if (code == NE_EXPR || code == GE_EXPR)
7559 {
7560 invert = 1;
7561 code = invert_tree_comparison (code);
7562 }
7563
7564 /* Compute a result for LT or EQ if args permit;
7565 otherwise return T. */
7566 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
7567 {
7568 if (code == EQ_EXPR)
7569 t1 = build_int_2 (tree_int_cst_equal (arg0, arg1), 0);
7570 else
7571 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
7572 ? INT_CST_LT_UNSIGNED (arg0, arg1)
7573 : INT_CST_LT (arg0, arg1)),
7574 0);
7575 }
7576
7577 #if 0 /* This is no longer useful, but breaks some real code. */
7578 /* Assume a nonexplicit constant cannot equal an explicit one,
7579 since such code would be undefined anyway.
7580 Exception: on sysvr4, using #pragma weak,
7581 a label can come out as 0. */
7582 else if (TREE_CODE (arg1) == INTEGER_CST
7583 && !integer_zerop (arg1)
7584 && TREE_CONSTANT (arg0)
7585 && TREE_CODE (arg0) == ADDR_EXPR
7586 && code == EQ_EXPR)
7587 t1 = build_int_2 (0, 0);
7588 #endif
7589 /* Two real constants can be compared explicitly. */
7590 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
7591 {
7592 /* If either operand is a NaN, the result is false with two
7593 exceptions: First, an NE_EXPR is true on NaNs, but that case
7594 is already handled correctly since we will be inverting the
7595 result for NE_EXPR. Second, if we had inverted a LE_EXPR
7596 or a GE_EXPR into a LT_EXPR, we must return true so that it
7597 will be inverted into false. */
7598
7599 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
7600 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
7601 t1 = build_int_2 (invert && code == LT_EXPR, 0);
7602
7603 else if (code == EQ_EXPR)
7604 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
7605 TREE_REAL_CST (arg1)),
7606 0);
7607 else
7608 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
7609 TREE_REAL_CST (arg1)),
7610 0);
7611 }
7612
7613 if (t1 == NULL_TREE)
7614 return t;
7615
7616 if (invert)
7617 TREE_INT_CST_LOW (t1) ^= 1;
7618
7619 TREE_TYPE (t1) = type;
7620 if (TREE_CODE (type) == BOOLEAN_TYPE)
7621 return (*lang_hooks.truthvalue_conversion) (t1);
7622 return t1;
7623
7624 case COND_EXPR:
7625 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
7626 so all simple results must be passed through pedantic_non_lvalue. */
7627 if (TREE_CODE (arg0) == INTEGER_CST)
7628 return pedantic_non_lvalue
7629 (TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
7630 else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
7631 return pedantic_omit_one_operand (type, arg1, arg0);
7632
7633 /* If the second operand is zero, invert the comparison and swap
7634 the second and third operands. Likewise if the second operand
7635 is constant and the third is not or if the third operand is
7636 equivalent to the first operand of the comparison. */
7637
7638 if (integer_zerop (arg1)
7639 || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
7640 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
7641 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
7642 TREE_OPERAND (t, 2),
7643 TREE_OPERAND (arg0, 1))))
7644 {
7645 /* See if this can be inverted. If it can't, possibly because
7646 it was a floating-point inequality comparison, don't do
7647 anything. */
7648 tem = invert_truthvalue (arg0);
7649
7650 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
7651 {
7652 t = build (code, type, tem,
7653 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
7654 arg0 = tem;
7655 /* arg1 should be the first argument of the new T. */
7656 arg1 = TREE_OPERAND (t, 1);
7657 STRIP_NOPS (arg1);
7658 }
7659 }
7660
7661 /* If we have A op B ? A : C, we may be able to convert this to a
7662 simpler expression, depending on the operation and the values
7663 of B and C. Signed zeros prevent all of these transformations,
7664 for reasons given above each one. */
7665
7666 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
7667 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
7668 arg1, TREE_OPERAND (arg0, 1))
7669 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
7670 {
7671 tree arg2 = TREE_OPERAND (t, 2);
7672 enum tree_code comp_code = TREE_CODE (arg0);
7673
7674 STRIP_NOPS (arg2);
7675
7676 /* If we have A op 0 ? A : -A, consider applying the following
7677 transformations:
7678
7679 A == 0? A : -A same as -A
7680 A != 0? A : -A same as A
7681 A >= 0? A : -A same as abs (A)
7682 A > 0? A : -A same as abs (A)
7683 A <= 0? A : -A same as -abs (A)
7684 A < 0? A : -A same as -abs (A)
7685
7686 None of these transformations work for modes with signed
7687 zeros. If A is +/-0, the first two transformations will
7688 change the sign of the result (from +0 to -0, or vice
7689 versa). The last four will fix the sign of the result,
7690 even though the original expressions could be positive or
7691 negative, depending on the sign of A.
7692
7693 Note that all these transformations are correct if A is
7694 NaN, since the two alternatives (A and -A) are also NaNs. */
7695 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
7696 ? real_zerop (TREE_OPERAND (arg0, 1))
7697 : integer_zerop (TREE_OPERAND (arg0, 1)))
7698 && TREE_CODE (arg2) == NEGATE_EXPR
7699 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
7700 switch (comp_code)
7701 {
7702 case EQ_EXPR:
7703 return
7704 pedantic_non_lvalue
7705 (convert (type,
7706 negate_expr
7707 (convert (TREE_TYPE (TREE_OPERAND (t, 1)),
7708 arg1))));
7709 case NE_EXPR:
7710 return pedantic_non_lvalue (convert (type, arg1));
7711 case GE_EXPR:
7712 case GT_EXPR:
7713 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7714 arg1 = convert ((*lang_hooks.types.signed_type)
7715 (TREE_TYPE (arg1)), arg1);
7716 return pedantic_non_lvalue
7717 (convert (type, fold (build1 (ABS_EXPR,
7718 TREE_TYPE (arg1), arg1))));
7719 case LE_EXPR:
7720 case LT_EXPR:
7721 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7722 arg1 = convert ((*lang_hooks.types.signed_type)
7723 (TREE_TYPE (arg1)), arg1);
7724 return pedantic_non_lvalue
7725 (negate_expr (convert (type,
7726 fold (build1 (ABS_EXPR,
7727 TREE_TYPE (arg1),
7728 arg1)))));
7729 default:
7730 abort ();
7731 }
7732
7733 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
7734 A == 0 ? A : 0 is always 0 unless A is -0. Note that
7735 both transformations are correct when A is NaN: A != 0
7736 is then true, and A == 0 is false. */
7737
7738 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
7739 {
7740 if (comp_code == NE_EXPR)
7741 return pedantic_non_lvalue (convert (type, arg1));
7742 else if (comp_code == EQ_EXPR)
7743 return pedantic_non_lvalue (convert (type, integer_zero_node));
7744 }
7745
7746 /* Try some transformations of A op B ? A : B.
7747
7748 A == B? A : B same as B
7749 A != B? A : B same as A
7750 A >= B? A : B same as max (A, B)
7751 A > B? A : B same as max (B, A)
7752 A <= B? A : B same as min (A, B)
7753 A < B? A : B same as min (B, A)
7754
7755 As above, these transformations don't work in the presence
7756 of signed zeros. For example, if A and B are zeros of
7757 opposite sign, the first two transformations will change
7758 the sign of the result. In the last four, the original
7759 expressions give different results for (A=+0, B=-0) and
7760 (A=-0, B=+0), but the transformed expressions do not.
7761
7762 The first two transformations are correct if either A or B
7763 is a NaN. In the first transformation, the condition will
7764 be false, and B will indeed be chosen. In the case of the
7765 second transformation, the condition A != B will be true,
7766 and A will be chosen.
7767
7768 The conversions to max() and min() are not correct if B is
7769 a number and A is not. The conditions in the original
7770 expressions will be false, so all four give B. The min()
7771 and max() versions would give a NaN instead. */
7772 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
7773 arg2, TREE_OPERAND (arg0, 0)))
7774 {
7775 tree comp_op0 = TREE_OPERAND (arg0, 0);
7776 tree comp_op1 = TREE_OPERAND (arg0, 1);
7777 tree comp_type = TREE_TYPE (comp_op0);
7778
7779 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
7780 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
7781 {
7782 comp_type = type;
7783 comp_op0 = arg1;
7784 comp_op1 = arg2;
7785 }
7786
7787 switch (comp_code)
7788 {
7789 case EQ_EXPR:
7790 return pedantic_non_lvalue (convert (type, arg2));
7791 case NE_EXPR:
7792 return pedantic_non_lvalue (convert (type, arg1));
7793 case LE_EXPR:
7794 case LT_EXPR:
7795 /* In C++ a ?: expression can be an lvalue, so put the
7796 operand which will be used if they are equal first
7797 so that we can convert this back to the
7798 corresponding COND_EXPR. */
7799 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
7800 return pedantic_non_lvalue
7801 (convert (type, fold (build (MIN_EXPR, comp_type,
7802 (comp_code == LE_EXPR
7803 ? comp_op0 : comp_op1),
7804 (comp_code == LE_EXPR
7805 ? comp_op1 : comp_op0)))));
7806 break;
7807 case GE_EXPR:
7808 case GT_EXPR:
7809 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
7810 return pedantic_non_lvalue
7811 (convert (type, fold (build (MAX_EXPR, comp_type,
7812 (comp_code == GE_EXPR
7813 ? comp_op0 : comp_op1),
7814 (comp_code == GE_EXPR
7815 ? comp_op1 : comp_op0)))));
7816 break;
7817 default:
7818 abort ();
7819 }
7820 }
7821
7822 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
7823 we might still be able to simplify this. For example,
7824 if C1 is one less or one more than C2, this might have started
7825 out as a MIN or MAX and been transformed by this function.
7826 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
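	  /* Illustrative example (hypothetical code): "x < 4 ? x : 3"
	     may have started out as "min (x, 3)"; the LT case below
	     recognizes C1 == C2 + 1 and rebuilds the MIN_EXPR.  */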
7827
7828 if (INTEGRAL_TYPE_P (type)
7829 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7830 && TREE_CODE (arg2) == INTEGER_CST)
7831 switch (comp_code)
7832 {
7833 case EQ_EXPR:
7834 /* We can replace A with C1 in this case. */
7835 arg1 = convert (type, TREE_OPERAND (arg0, 1));
7836 t = build (code, type, TREE_OPERAND (t, 0), arg1,
7837 TREE_OPERAND (t, 2));
7838 break;
7839
7840 case LT_EXPR:
7841 /* If C1 is C2 + 1, this is min(A, C2). */
7842 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
7843 && operand_equal_p (TREE_OPERAND (arg0, 1),
7844 const_binop (PLUS_EXPR, arg2,
7845 integer_one_node, 0), 1))
7846 return pedantic_non_lvalue
7847 (fold (build (MIN_EXPR, type, arg1, arg2)));
7848 break;
7849
7850 case LE_EXPR:
7851 /* If C1 is C2 - 1, this is min(A, C2). */
7852 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
7853 && operand_equal_p (TREE_OPERAND (arg0, 1),
7854 const_binop (MINUS_EXPR, arg2,
7855 integer_one_node, 0), 1))
7856 return pedantic_non_lvalue
7857 (fold (build (MIN_EXPR, type, arg1, arg2)));
7858 break;
7859
7860 case GT_EXPR:
7861 /* If C1 is C2 - 1, this is max(A, C2). */
7862 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
7863 && operand_equal_p (TREE_OPERAND (arg0, 1),
7864 const_binop (MINUS_EXPR, arg2,
7865 integer_one_node, 0), 1))
7866 return pedantic_non_lvalue
7867 (fold (build (MAX_EXPR, type, arg1, arg2)));
7868 break;
7869
7870 case GE_EXPR:
7871 /* If C1 is C2 + 1, this is max(A, C2). */
7872 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
7873 && operand_equal_p (TREE_OPERAND (arg0, 1),
7874 const_binop (PLUS_EXPR, arg2,
7875 integer_one_node, 0), 1))
7876 return pedantic_non_lvalue
7877 (fold (build (MAX_EXPR, type, arg1, arg2)));
7878 break;
7879 case NE_EXPR:
7880 break;
7881 default:
7882 abort ();
7883 }
7884 }
7885
7886 /* If the second operand is simpler than the third, swap them
7887 since that produces better jump optimization results. */
7888 if ((TREE_CONSTANT (arg1) || DECL_P (arg1)
7889 || TREE_CODE (arg1) == SAVE_EXPR)
7890 && ! (TREE_CONSTANT (TREE_OPERAND (t, 2))
7891 || DECL_P (TREE_OPERAND (t, 2))
7892 || TREE_CODE (TREE_OPERAND (t, 2)) == SAVE_EXPR))
7893 {
7894 /* See if this can be inverted. If it can't, possibly because
7895 it was a floating-point inequality comparison, don't do
7896 anything. */
7897 tem = invert_truthvalue (arg0);
7898
7899 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
7900 {
7901 t = build (code, type, tem,
7902 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
7903 arg0 = tem;
7904 /* arg1 should be the first argument of the new T. */
7905 arg1 = TREE_OPERAND (t, 1);
7906 STRIP_NOPS (arg1);
7907 }
7908 }
7909
7910 /* Convert A ? 1 : 0 to simply A. */
7911 if (integer_onep (TREE_OPERAND (t, 1))
7912 && integer_zerop (TREE_OPERAND (t, 2))
7913 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
7914 call to fold will try to move the conversion inside
7915 a COND, which will recurse. In that case, the COND_EXPR
7916 is probably the best choice, so leave it alone. */
7917 && type == TREE_TYPE (arg0))
7918 return pedantic_non_lvalue (arg0);
7919
7920 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
7921 over COND_EXPR in cases such as floating point comparisons. */
7922 if (integer_zerop (TREE_OPERAND (t, 1))
7923 && integer_onep (TREE_OPERAND (t, 2))
7924 && truth_value_p (TREE_CODE (arg0)))
7925 return pedantic_non_lvalue (convert (type,
7926 invert_truthvalue (arg0)));
7927
7928 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
7929 operation is simply A & 2. */
7930
7931 if (integer_zerop (TREE_OPERAND (t, 2))
7932 && TREE_CODE (arg0) == NE_EXPR
7933 && integer_zerop (TREE_OPERAND (arg0, 1))
7934 && integer_pow2p (arg1)
7935 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
7936 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
7937 arg1, 1))
7938 return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));
7939
7940 /* Convert A ? B : 0 into A && B if A and B are truth values. */
7941 if (integer_zerop (TREE_OPERAND (t, 2))
7942 && truth_value_p (TREE_CODE (arg0))
7943 && truth_value_p (TREE_CODE (arg1)))
7944 return pedantic_non_lvalue (fold (build (TRUTH_ANDIF_EXPR, type,
7945 arg0, arg1)));
7946
7947 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
7948 if (integer_onep (TREE_OPERAND (t, 2))
7949 && truth_value_p (TREE_CODE (arg0))
7950 && truth_value_p (TREE_CODE (arg1)))
7951 {
7952 /* Only perform transformation if ARG0 is easily inverted. */
7953 tem = invert_truthvalue (arg0);
7954 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
7955 return pedantic_non_lvalue (fold (build (TRUTH_ORIF_EXPR, type,
7956 tem, arg1)));
7957 }
7958
7959 return t;
7960
7961 case COMPOUND_EXPR:
7962 /* When pedantic, a compound expression can be neither an lvalue
7963 nor an integer constant expression. */
7964 if (TREE_SIDE_EFFECTS (arg0) || pedantic)
7965 return t;
7966 /* Don't let (0, 0) be a null pointer constant. */
7967 if (integer_zerop (arg1))
7968 return build1 (NOP_EXPR, type, arg1);
7969 return convert (type, arg1);
7970
7971 case COMPLEX_EXPR:
7972 if (wins)
7973 return build_complex (type, arg0, arg1);
7974 return t;
7975
7976 case REALPART_EXPR:
7977 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7978 return t;
7979 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7980 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7981 TREE_OPERAND (arg0, 1));
7982 else if (TREE_CODE (arg0) == COMPLEX_CST)
7983 return TREE_REALPART (arg0);
7984 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7985 return fold (build (TREE_CODE (arg0), type,
7986 fold (build1 (REALPART_EXPR, type,
7987 TREE_OPERAND (arg0, 0))),
7988 fold (build1 (REALPART_EXPR,
7989 type, TREE_OPERAND (arg0, 1)))));
7990 return t;
7991
7992 case IMAGPART_EXPR:
7993 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7994 return convert (type, integer_zero_node);
7995 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7996 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7997 TREE_OPERAND (arg0, 0));
7998 else if (TREE_CODE (arg0) == COMPLEX_CST)
7999 return TREE_IMAGPART (arg0);
8000 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8001 return fold (build (TREE_CODE (arg0), type,
8002 fold (build1 (IMAGPART_EXPR, type,
8003 TREE_OPERAND (arg0, 0))),
8004 fold (build1 (IMAGPART_EXPR, type,
8005 TREE_OPERAND (arg0, 1)))));
8006 return t;
8007
8008 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
8009 appropriate. */
8010 case CLEANUP_POINT_EXPR:
8011 if (! has_cleanups (arg0))
8012 return TREE_OPERAND (t, 0);
8013
8014 {
8015 enum tree_code code0 = TREE_CODE (arg0);
8016 int kind0 = TREE_CODE_CLASS (code0);
8017 tree arg00 = TREE_OPERAND (arg0, 0);
8018 tree arg01;
8019
8020 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
8021 return fold (build1 (code0, type,
8022 fold (build1 (CLEANUP_POINT_EXPR,
8023 TREE_TYPE (arg00), arg00))));
8024
8025 if (kind0 == '<' || kind0 == '2'
8026 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
8027 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
8028 || code0 == TRUTH_XOR_EXPR)
8029 {
8030 arg01 = TREE_OPERAND (arg0, 1);
8031
8032 if (TREE_CONSTANT (arg00)
8033 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
8034 && ! has_cleanups (arg00)))
8035 return fold (build (code0, type, arg00,
8036 fold (build1 (CLEANUP_POINT_EXPR,
8037 TREE_TYPE (arg01), arg01))));
8038
8039 if (TREE_CONSTANT (arg01))
8040 return fold (build (code0, type,
8041 fold (build1 (CLEANUP_POINT_EXPR,
8042 TREE_TYPE (arg00), arg00)),
8043 arg01));
8044 }
8045
8046 return t;
8047 }
8048
8049 case CALL_EXPR:
8050 /* Check for a built-in function. */
8051 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
8052 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (expr, 0), 0))
8053 == FUNCTION_DECL)
8054 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
8055 {
8056 tree tmp = fold_builtin (expr);
8057 if (tmp)
8058 return tmp;
8059 }
8060 return t;
8061
8062 default:
8063 return t;
8064 } /* switch (code) */
8065 }
8066
8067 #ifdef ENABLE_FOLD_CHECKING
8068 #undef fold
8069
8070 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
8071 static void fold_check_failed (tree, tree);
8072 void print_fold_checksum (tree);
8073
8074 /* When --enable-checking=fold, compute a digest of expr before
8075 and after the actual fold call, to verify that fold did not
8076 accidentally change the original expr. */
8077
8078 tree
8079 fold (tree expr)
8080 {
8081 tree ret;
8082 struct md5_ctx ctx;
8083 unsigned char checksum_before[16], checksum_after[16];
8084 htab_t ht;
8085
8086 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8087 md5_init_ctx (&ctx);
8088 fold_checksum_tree (expr, &ctx, ht);
8089 md5_finish_ctx (&ctx, checksum_before);
8090 htab_empty (ht);
8091
8092 ret = fold_1 (expr);
8093
8094 md5_init_ctx (&ctx);
8095 fold_checksum_tree (expr, &ctx, ht);
8096 md5_finish_ctx (&ctx, checksum_after);
8097 htab_delete (ht);
8098
8099 if (memcmp (checksum_before, checksum_after, 16))
8100 fold_check_failed (expr, ret);
8101
8102 return ret;
8103 }
8104
8105 void
8106 print_fold_checksum (tree expr)
8107 {
8108 struct md5_ctx ctx;
8109 unsigned char checksum[16], cnt;
8110 htab_t ht;
8111
8112 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8113 md5_init_ctx (&ctx);
8114 fold_checksum_tree (expr, &ctx, ht);
8115 md5_finish_ctx (&ctx, checksum);
8116 htab_delete (ht);
8117 for (cnt = 0; cnt < 16; ++cnt)
8118 fprintf (stderr, "%02x", checksum[cnt]);
8119 putc ('\n', stderr);
8120 }
8121
8122 static void
8123 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
8124 {
8125 internal_error ("fold check: original tree changed by fold");
8126 }
8127
8128 static void
8129 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
8130 {
8131 void **slot;
8132 enum tree_code code;
8133 char buf[sizeof (struct tree_decl)];
8134 int i, len;
8135
8136 if (sizeof (struct tree_exp) + 5 * sizeof (tree)
8137 > sizeof (struct tree_decl)
8138 || sizeof (struct tree_type) > sizeof (struct tree_decl))
8139 abort ();
8140 if (expr == NULL)
8141 return;
8142 slot = htab_find_slot (ht, expr, INSERT);
8143 if (*slot != NULL)
8144 return;
8145 *slot = expr;
8146 code = TREE_CODE (expr);
8147 if (code == SAVE_EXPR && SAVE_EXPR_NOPLACEHOLDER (expr))
8148 {
8149 /* Allow SAVE_EXPR_NOPLACEHOLDER flag to be modified. */
8150 memcpy (buf, expr, tree_size (expr));
8151 expr = (tree) buf;
8152 SAVE_EXPR_NOPLACEHOLDER (expr) = 0;
8153 }
8154 else if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
8155 {
8156 /* Allow DECL_ASSEMBLER_NAME to be modified. */
8157 memcpy (buf, expr, tree_size (expr));
8158 expr = (tree) buf;
8159 SET_DECL_ASSEMBLER_NAME (expr, NULL);
8160 }
8161 else if (TREE_CODE_CLASS (code) == 't'
8162 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
8163 {
8164 /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified. */
8165 memcpy (buf, expr, tree_size (expr));
8166 expr = (tree) buf;
8167 TYPE_POINTER_TO (expr) = NULL;
8168 TYPE_REFERENCE_TO (expr) = NULL;
8169 }
8170 md5_process_bytes (expr, tree_size (expr), ctx);
8171 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
8172 if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
8173 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
8174 len = TREE_CODE_LENGTH (code);
8175 switch (TREE_CODE_CLASS (code))
8176 {
8177 case 'c':
8178 switch (code)
8179 {
8180 case STRING_CST:
8181 md5_process_bytes (TREE_STRING_POINTER (expr),
8182 TREE_STRING_LENGTH (expr), ctx);
8183 break;
8184 case COMPLEX_CST:
8185 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
8186 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
8187 break;
8188 case VECTOR_CST:
8189 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
8190 break;
8191 default:
8192 break;
8193 }
8194 break;
8195 case 'x':
8196 switch (code)
8197 {
8198 case TREE_LIST:
8199 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
8200 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
8201 break;
8202 case TREE_VEC:
8203 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
8204 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
8205 break;
8206 default:
8207 break;
8208 }
8209 break;
8210 case 'e':
8211 switch (code)
8212 {
8213 case SAVE_EXPR: len = 2; break;
8214 case GOTO_SUBROUTINE_EXPR: len = 0; break;
8215 case RTL_EXPR: len = 0; break;
8216 case WITH_CLEANUP_EXPR: len = 2; break;
8217 default: break;
8218 }
8219 /* FALLTHROUGH */
8220 case 'r':
8221 case '<':
8222 case '1':
8223 case '2':
8224 case 's':
8225 for (i = 0; i < len; ++i)
8226 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
8227 break;
8228 case 'd':
8229 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
8230 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
8231 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
8232 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
8233 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
8234 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
8235 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
8236 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
8237 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
8238 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
8239 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
8240 break;
8241 case 't':
8242 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
8243 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
8244 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
8245 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
8246 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
8247 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
8248 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
8249 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
8250 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
8251 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
8252 break;
8253 default:
8254 break;
8255 }
8256 }
8257
8258 #endif
8259
8260 /* Perform constant folding and related simplification of initializer
8261 expression EXPR. This behaves identically to "fold" but ignores
8262 potential run-time traps and exceptions that fold must preserve. */
8263
8264 tree
8265 fold_initializer (tree expr)
8266 {
8267 int saved_signaling_nans = flag_signaling_nans;
8268 int saved_trapping_math = flag_trapping_math;
8269 int saved_trapv = flag_trapv;
8270 tree result;
8271
8272 flag_signaling_nans = 0;
8273 flag_trapping_math = 0;
8274 flag_trapv = 0;
8275
8276 result = fold (expr);
8277
8278 flag_signaling_nans = saved_signaling_nans;
8279 flag_trapping_math = saved_trapping_math;
8280 flag_trapv = saved_trapv;
8281
8282 return result;
8283 }
8284
8285 /* Determine if first argument is a multiple of second argument. Return 0 if
8286 it is not, or if we cannot easily determine that it is.
8287
8288 An example of the sort of thing we care about (at this point; this routine
8289 could surely be made more general, and expanded to do what the *_DIV_EXPR's
8290 fold cases do now) is discovering that
8291
8292 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8293
8294 is a multiple of
8295
8296 SAVE_EXPR (J * 8)
8297
8298 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
8299
8300 This code also handles discovering that
8301
8302 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8303
8304 is a multiple of 8 so we don't have to worry about dealing with a
8305 possible remainder.
8306
8307 Note that we *look* inside a SAVE_EXPR only to determine how it was
8308 calculated; it is not safe for fold to do much of anything else with the
8309 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
8310 at run time. For example, the latter example above *cannot* be implemented
8311 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
8312 evaluation time of the original SAVE_EXPR is not necessarily the same at
8313 the time the new expression is evaluated. The only optimization of this
8314 sort that would be valid is changing
8315
8316 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
8317
8318 divided by 8 to
8319
8320 SAVE_EXPR (I) * SAVE_EXPR (J)
8321
8322 (where the same SAVE_EXPR (J) is used in the original and the
8323 transformed version). */
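/* Illustrative examples (not from the original sources): with TOP being
   the tree for "i * 4" and BOTTOM the constant 2, multiple_of_p returns
   1 via the MULT_EXPR case below, since 4 % 2 == 0; for TOP == 24 and
   BOTTOM == 8 it returns 1 directly via the INTEGER_CST case.  */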
8324
8325 static int
8326 multiple_of_p (tree type, tree top, tree bottom)
8327 {
8328 if (operand_equal_p (top, bottom, 0))
8329 return 1;
8330
8331 if (TREE_CODE (type) != INTEGER_TYPE)
8332 return 0;
8333
8334 switch (TREE_CODE (top))
8335 {
8336 case MULT_EXPR:
8337 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8338 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8339
8340 case PLUS_EXPR:
8341 case MINUS_EXPR:
8342 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8343 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8344
8345 case LSHIFT_EXPR:
8346 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
8347 {
8348 tree op1, t1;
8349
8350 op1 = TREE_OPERAND (top, 1);
8351 /* const_binop may not detect overflow correctly,
8352 so check for it explicitly here. */
8353 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
8354 > TREE_INT_CST_LOW (op1)
8355 && TREE_INT_CST_HIGH (op1) == 0
8356 && 0 != (t1 = convert (type,
8357 const_binop (LSHIFT_EXPR, size_one_node,
8358 op1, 0)))
8359 && ! TREE_OVERFLOW (t1))
8360 return multiple_of_p (type, t1, bottom);
8361 }
8362 return 0;
8363
8364 case NOP_EXPR:
8365 /* Can't handle conversions from non-integral or wider integral type. */
8366 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
8367 || (TYPE_PRECISION (type)
8368 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
8369 return 0;
8370
8371 /* ... fall through ... */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
          || (TREE_UNSIGNED (type)
              && (tree_int_cst_sgn (top) < 0
                  || tree_int_cst_sgn (bottom) < 0)))
        return 0;
      return integer_zerop (const_binop (TRUNC_MOD_EXPR,
                                         top, bottom, 0));

    default:
      return 0;
    }
}
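
/* A minimal usage sketch (the helper name fold_exact_div_sketch is
   invented for illustration): the *_DIV_EXPR fold cases mentioned
   above can use multiple_of_p in this way to rewrite a division that
   is known to be exact as an EXACT_DIV_EXPR.  */

static tree
fold_exact_div_sketch (tree type, tree dividend, tree divisor)
{
  /* When the dividend is provably a multiple of the divisor, the
     division leaves no remainder.  */
  if (multiple_of_p (type, dividend, divisor))
    return fold (build (EXACT_DIV_EXPR, type, dividend, divisor));

  return NULL_TREE;
}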

/* Return true if `t' is known to be non-negative.  */

int
tree_expr_nonnegative_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case ABS_EXPR:
    case FFS_EXPR:
    case POPCOUNT_EXPR:
    case PARITY_EXPR:
      return 1;

    case CLZ_EXPR:
    case CTZ_EXPR:
      /* These are undefined at zero.  This is true even if
         C[LT]Z_DEFINED_VALUE_AT_ZERO is set, since what we're
         computing here is a user-visible property.  */
      return 0;

    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
        return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
               && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
         both unsigned and their types are at least 2 bits narrower than
         the result type.  */
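      /* For example, the sum of two zero-extended unsigned chars in a
         32-bit signed int is at most 255 + 255 = 510, which needs only
         9 bits, so the sign bit can never be set.  */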
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
          && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
          && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
        {
          tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
          tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
            {
              unsigned int prec = MAX (TYPE_PRECISION (inner1),
                                       TYPE_PRECISION (inner2)) + 1;
              return prec < TYPE_PRECISION (TREE_TYPE (t));
            }
        }
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
        {
          /* x * x for floating point x is always non-negative.  */
          if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
            return 1;
          return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
                 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
        }

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
         both unsigned and the sum of their precisions is less than the
         precision of the result.  */
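      /* For example, the product of two zero-extended unsigned chars in
         a 32-bit signed int is at most 255 * 255 = 65025, which fits in
         16 bits, so the sign bit stays clear.  */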
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
          && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
          && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
        {
          tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
          tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
            return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
                   < TYPE_PRECISION (TREE_TYPE (t));
        }
      return 0;

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case RDIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
        tree outer_type = TREE_TYPE (t);

        if (TREE_CODE (outer_type) == REAL_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              {
                if (TREE_UNSIGNED (inner_type))
                  return 1;
                return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
              }
          }
        else if (TREE_CODE (outer_type) == INTEGER_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
                     && TREE_UNSIGNED (inner_type);
          }
      }
      break;

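    /* A conditional is non-negative if both of its arms are; MIN needs
       both operands non-negative while MAX needs only one; the
       remaining wrappers inherit the property from the operand that
       supplies their value.  */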
    case COND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
    case COMPOUND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MIN_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MAX_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MODIFY_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case BIND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case SAVE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case NON_LVALUE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case FLOAT_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case RTL_EXPR:
      return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));

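    /* Calls to built-in functions can use knowledge of the callee:
       the cabs, exp, fabs and sqrt families never yield a negative
       result; atan, the rounding families and pow (for a non-negative
       base) preserve non-negativity of their first argument.  */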
    case CALL_EXPR:
      if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
        {
          tree fndecl = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
          tree arglist = TREE_OPERAND (t, 1);
          if (TREE_CODE (fndecl) == FUNCTION_DECL
              && DECL_BUILT_IN (fndecl)
              && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
            switch (DECL_FUNCTION_CODE (fndecl))
              {
              case BUILT_IN_CABS:
              case BUILT_IN_CABSL:
              case BUILT_IN_CABSF:
              case BUILT_IN_EXP:
              case BUILT_IN_EXPF:
              case BUILT_IN_EXPL:
              case BUILT_IN_FABS:
              case BUILT_IN_FABSF:
              case BUILT_IN_FABSL:
              case BUILT_IN_SQRT:
              case BUILT_IN_SQRTF:
              case BUILT_IN_SQRTL:
                return 1;

              case BUILT_IN_ATAN:
              case BUILT_IN_ATANF:
              case BUILT_IN_ATANL:
              case BUILT_IN_CEIL:
              case BUILT_IN_CEILF:
              case BUILT_IN_CEILL:
              case BUILT_IN_FLOOR:
              case BUILT_IN_FLOORF:
              case BUILT_IN_FLOORL:
              case BUILT_IN_NEARBYINT:
              case BUILT_IN_NEARBYINTF:
              case BUILT_IN_NEARBYINTL:
              case BUILT_IN_ROUND:
              case BUILT_IN_ROUNDF:
              case BUILT_IN_ROUNDL:
              case BUILT_IN_TRUNC:
              case BUILT_IN_TRUNCF:
              case BUILT_IN_TRUNCL:
                return tree_expr_nonnegative_p (TREE_VALUE (arglist));

              case BUILT_IN_POW:
              case BUILT_IN_POWF:
              case BUILT_IN_POWL:
                return tree_expr_nonnegative_p (TREE_VALUE (arglist));

              default:
                break;
              }
        }

      /* ... fall through ... */

    default:
      if (truth_value_p (TREE_CODE (t)))
        /* Truth values evaluate to 0 or 1, which is nonnegative.  */
        return 1;
    }

  /* We don't know the sign of `t', so be conservative and return false.  */
  return 0;
}
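
/* A minimal usage sketch (the helper name fold_abs_sketch is invented
   for illustration): a caller such as fold can drop a redundant
   ABS_EXPR once its operand is known to be non-negative.  */

static tree
fold_abs_sketch (tree t)
{
  /* |x| is x whenever x is provably >= 0; a negative zero does not
     count as non-negative, so the REAL_CST case above is safe.  */
  if (TREE_CODE (t) == ABS_EXPR
      && tree_expr_nonnegative_p (TREE_OPERAND (t, 0)))
    return TREE_OPERAND (t, 0);

  return t;
}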

/* Return true if `r' is known to be non-negative.
   Only handles constants at the moment.  */
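/* For example, (const_int 42), a SYMBOL_REF and a CONST_VECTOR whose
   elements are all non-negative CONST_INTs satisfy this predicate,
   while (const_int -1) does not.  */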

int
rtl_expr_nonnegative_p (rtx r)
{
  switch (GET_CODE (r))
    {
    case CONST_INT:
      return INTVAL (r) >= 0;

    case CONST_DOUBLE:
      if (GET_MODE (r) == VOIDmode)
        return CONST_DOUBLE_HIGH (r) >= 0;
      return 0;

    case CONST_VECTOR:
      {
        int units, i;
        rtx elt;

        units = CONST_VECTOR_NUNITS (r);

        for (i = 0; i < units; ++i)
          {
            elt = CONST_VECTOR_ELT (r, i);
            if (!rtl_expr_nonnegative_p (elt))
              return 0;
          }

        return 1;
      }

    case SYMBOL_REF:
    case LABEL_REF:
      /* These are always nonnegative.  */
      return 1;

    default:
      return 0;
    }
}

#include "gt-fold-const.h"