alias.c: Follow spelling conventions.
[gcc.git] / gcc / fold-const.c
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
29
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type.
32
33 fold takes a tree as argument and returns a simplified tree.
34
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
38
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
41
42 force_fit_type takes a constant and prior overflow indicator, and
43 forces the value to fit the type. It returns an overflow indicator. */
44
45 #include "config.h"
46 #include "system.h"
47 #include "coretypes.h"
48 #include "tm.h"
49 #include "flags.h"
50 #include "tree.h"
51 #include "real.h"
52 #include "rtl.h"
53 #include "expr.h"
54 #include "tm_p.h"
55 #include "toplev.h"
56 #include "ggc.h"
57 #include "hashtab.h"
58 #include "langhooks.h"
59 #include "md5.h"
60
61 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
62 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
63 static bool negate_expr_p (tree);
64 static tree negate_expr (tree);
65 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
66 static tree associate_trees (tree, tree, enum tree_code, tree);
67 static tree int_const_binop (enum tree_code, tree, tree, int);
68 static tree const_binop (enum tree_code, tree, tree, int);
69 static hashval_t size_htab_hash (const void *);
70 static int size_htab_eq (const void *, const void *);
71 static tree fold_convert (tree, tree);
72 static enum tree_code invert_tree_comparison (enum tree_code);
73 static enum tree_code swap_tree_comparison (enum tree_code);
74 static int comparison_to_compcode (enum tree_code);
75 static enum tree_code compcode_to_comparison (int);
76 static int truth_value_p (enum tree_code);
77 static int operand_equal_for_comparison_p (tree, tree, tree);
78 static int twoval_comparison_p (tree, tree *, tree *, int *);
79 static tree eval_subst (tree, tree, tree, tree, tree);
80 static tree pedantic_omit_one_operand (tree, tree, tree);
81 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
82 static tree make_bit_field_ref (tree, tree, int, int, int);
83 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
84 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
85 enum machine_mode *, int *, int *,
86 tree *, tree *);
87 static int all_ones_mask_p (tree, int);
88 static tree sign_bit_p (tree, tree);
89 static int simple_operand_p (tree);
90 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
91 static tree make_range (tree, int *, tree *, tree *);
92 static tree build_range_check (tree, tree, int, tree, tree);
93 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
94 tree);
95 static tree fold_range_test (tree);
96 static tree unextend (tree, int, int, tree);
97 static tree fold_truthop (enum tree_code, tree, tree, tree);
98 static tree optimize_minmax_comparison (tree);
99 static tree extract_muldiv (tree, tree, enum tree_code, tree);
100 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
101 static tree strip_compound_expr (tree, tree);
102 static int multiple_of_p (tree, tree, tree);
103 static tree constant_boolean_node (int, tree);
104 static int count_cond (tree, int);
105 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
106 tree, int);
107 static bool fold_real_zero_addition_p (tree, tree, int);
108 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
109 tree, tree, tree);
110 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
111
112 /* The following constants represent a bit based encoding of GCC's
113 comparison operators. This encoding simplifies transformations
114 on relational comparison operators, such as AND and OR. */
115 #define COMPCODE_FALSE 0
116 #define COMPCODE_LT 1
117 #define COMPCODE_EQ 2
118 #define COMPCODE_LE 3
119 #define COMPCODE_GT 4
120 #define COMPCODE_NE 5
121 #define COMPCODE_GE 6
122 #define COMPCODE_TRUE 7
123
124 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
125 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
126 and SUM1. Then this yields nonzero if overflow occurred during the
127 addition.
128
129 Overflow occurs if A and B have the same sign, but A and SUM differ in
130 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
131 sign. */
132 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
133 \f
134 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
135 We do that by representing the two-word integer in 4 words, with only
136 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
137 number. The value of the word is LOWPART + HIGHPART * BASE. */
138
139 #define LOWPART(x) \
140 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
141 #define HIGHPART(x) \
142 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
143 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
144
145 /* Unpack a two-word integer into 4 words.
146 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
147 WORDS points to the array of HOST_WIDE_INTs. */
148
149 static void
150 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
151 {
152 words[0] = LOWPART (low);
153 words[1] = HIGHPART (low);
154 words[2] = LOWPART (hi);
155 words[3] = HIGHPART (hi);
156 }
157
158 /* Pack an array of 4 words into a two-word integer.
159 WORDS points to the array of words.
160 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
161
162 static void
163 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
164 HOST_WIDE_INT *hi)
165 {
166 *low = words[0] + words[1] * BASE;
167 *hi = words[2] + words[3] * BASE;
168 }
169 \f
170 /* Make the integer constant T valid for its type by setting to 0 or 1 all
171 the bits in the constant that don't belong in the type.
172
173 Return 1 if a signed overflow occurs, 0 otherwise. If OVERFLOW is
174 nonzero, a signed overflow has already occurred in calculating T, so
175 propagate it. */
176
177 int
178 force_fit_type (tree t, int overflow)
179 {
180 unsigned HOST_WIDE_INT low;
181 HOST_WIDE_INT high;
182 unsigned int prec;
183
184 if (TREE_CODE (t) == REAL_CST)
185 {
186 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
187 Consider doing it via real_convert now. */
188 return overflow;
189 }
190
/* Only INTEGER_CST nodes are adjusted; any other node kind passes the
   caller's overflow indication straight through.  */
191 else if (TREE_CODE (t) != INTEGER_CST)
192 return overflow;
193
/* Save the incoming value so the final comparison can tell whether
   truncation or sign extension below actually changed it.  */
194 low = TREE_INT_CST_LOW (t);
195 high = TREE_INT_CST_HIGH (t);
196
/* Pointers and offsets are fitted to the target pointer width rather
   than the type's recorded precision.  */
197 if (POINTER_TYPE_P (TREE_TYPE (t))
198 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
199 prec = POINTER_SIZE;
200 else
201 prec = TYPE_PRECISION (TREE_TYPE (t));
202
203 /* First clear all bits that are beyond the type's precision. */
204
/* Three cases: full double-word width (nothing to do), precision in
   the high word only, or precision within the low word.  The odd-looking
   masks avoid shifting by a full host word, which would be undefined.  */
205 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
206 ;
207 else if (prec > HOST_BITS_PER_WIDE_INT)
208 TREE_INT_CST_HIGH (t)
209 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
210 else
211 {
212 TREE_INT_CST_HIGH (t) = 0;
213 if (prec < HOST_BITS_PER_WIDE_INT)
214 TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
215 }
216
217 /* Unsigned types do not suffer sign extension or overflow unless they
218 are a sizetype. */
219 if (TREE_UNSIGNED (TREE_TYPE (t))
220 && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
221 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
222 return overflow;
223
224 /* If the value's sign bit is set, extend the sign. */
/* The sign bit is bit PREC-1: in the high word when PREC exceeds one
   host word, otherwise in the low word.  */
225 if (prec != 2 * HOST_BITS_PER_WIDE_INT
226 && (prec > HOST_BITS_PER_WIDE_INT
227 ? 0 != (TREE_INT_CST_HIGH (t)
228 & ((HOST_WIDE_INT) 1
229 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
230 : 0 != (TREE_INT_CST_LOW (t)
231 & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
232 {
233 /* Value is negative:
234 set to 1 all the bits that are outside this type's precision. */
235 if (prec > HOST_BITS_PER_WIDE_INT)
236 TREE_INT_CST_HIGH (t)
237 |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
238 else
239 {
240 TREE_INT_CST_HIGH (t) = -1;
241 if (prec < HOST_BITS_PER_WIDE_INT)
242 TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
243 }
244 }
245
246 /* Return nonzero if signed overflow occurred. */
/* The XORs are nonzero exactly when the stored value changed; fold in
   the caller's prior overflow indication.  */
247 return
248 ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
249 != 0);
250 }
251 \f
252 /* Add two doubleword integers with doubleword result.
253 Each argument is given as two `HOST_WIDE_INT' pieces.
254 One argument is L1 and H1; the other, L2 and H2.
255 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
256
257 int
258 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
259 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
260 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
261 {
262 unsigned HOST_WIDE_INT l;
263 HOST_WIDE_INT h;
264
265 l = l1 + l2;
266 h = h1 + h2 + (l < l1);
267
268 *lv = l;
269 *hv = h;
270 return OVERFLOW_SUM_SIGN (h1, h2, h);
271 }
272
273 /* Negate a doubleword integer with doubleword result.
274 Return nonzero if the operation overflows, assuming it's signed.
275 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
276 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
277
278 int
279 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
280 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
281 {
282 if (l1 == 0)
283 {
284 *lv = 0;
285 *hv = - h1;
286 return (*hv & h1) < 0;
287 }
288 else
289 {
290 *lv = -l1;
291 *hv = ~h1;
292 return 0;
293 }
294 }
295 \f
296 /* Multiply two doubleword integers with doubleword result.
297 Return nonzero if the operation overflows, assuming it's signed.
298 Each argument is given as two `HOST_WIDE_INT' pieces.
299 One argument is L1 and H1; the other, L2 and H2.
300 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
301
302 int
303 mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
304 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
305 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
306 {
307 HOST_WIDE_INT arg1[4];
308 HOST_WIDE_INT arg2[4];
309 HOST_WIDE_INT prod[4 * 2];
310 unsigned HOST_WIDE_INT carry;
311 int i, j, k;
312 unsigned HOST_WIDE_INT toplow, neglow;
313 HOST_WIDE_INT tophigh, neghigh;
314
/* Break each operand into four half-word digits.  */
315 encode (arg1, l1, h1);
316 encode (arg2, l2, h2);
317
318 memset (prod, 0, sizeof prod);
319
/* Schoolbook multiplication on the half-word digits; the comments on
   the bounds below assume 32-bit half-words but hold for any width.  */
320 for (i = 0; i < 4; i++)
321 {
322 carry = 0;
323 for (j = 0; j < 4; j++)
324 {
325 k = i + j;
326 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
327 carry += arg1[i] * arg2[j];
328 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
329 carry += prod[k];
330 prod[k] = LOWPART (carry);
331 carry = HIGHPART (carry);
332 }
333 prod[i + 4] = carry;
334 }
335
336 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
337
338 /* Check for overflow by calculating the top half of the answer in full;
339 it should agree with the low half's sign bit. */
340 decode (prod + 4, &toplow, &tophigh);
/* The digit multiply above treated the operands as unsigned; for each
   negative operand, correct the top half by subtracting the other
   operand (standard signed-multiply high-part fixup).  */
341 if (h1 < 0)
342 {
343 neg_double (l2, h2, &neglow, &neghigh);
344 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
345 }
346 if (h2 < 0)
347 {
348 neg_double (l1, h1, &neglow, &neghigh);
349 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
350 }
/* No overflow iff the top half is all copies of the low half's sign
   bit: all ones when *HV is negative, all zeros otherwise.  */
351 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
352 }
353 \f
354 /* Shift the doubleword integer in L1, H1 left by COUNT places
355 keeping only PREC bits of result.
356 Shift right if COUNT is negative.
357 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
358 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
359
360 void
361 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
362 HOST_WIDE_INT count, unsigned int prec,
363 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
364 {
365 unsigned HOST_WIDE_INT signmask;
366
/* A negative COUNT is a right shift by -COUNT.  */
367 if (count < 0)
368 {
369 rshift_double (l1, h1, -count, prec, lv, hv, arith);
370 return;
371 }
372
373 #ifdef SHIFT_COUNT_TRUNCATED
374 if (SHIFT_COUNT_TRUNCATED)
375 count %= prec;
376 #endif
377
378 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
379 {
380 /* Shifting by the host word size is undefined according to the
381 ANSI standard, so we must handle this as a special case. */
382 *hv = 0;
383 *lv = 0;
384 }
385 else if (count >= HOST_BITS_PER_WIDE_INT)
386 {
387 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
388 *lv = 0;
389 }
390 else
391 {
/* The two-step right shift (by WIDTH-COUNT-1, then by 1) moves the
   bits carried out of L1 into *HV without ever shifting by a full
   host word, which would be undefined when COUNT == 0.  */
392 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
393 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
394 *lv = l1 << count;
395 }
396
397 /* Sign extend all bits that are beyond the precision. */
398
/* SIGNMASK is all ones if bit PREC-1 of the result is set, else zero.  */
399 signmask = -((prec > HOST_BITS_PER_WIDE_INT
400 ? ((unsigned HOST_WIDE_INT) *hv
401 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
402 : (*lv >> (prec - 1))) & 1);
403
404 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
405 ;
406 else if (prec >= HOST_BITS_PER_WIDE_INT)
407 {
408 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
409 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
410 }
411 else
412 {
413 *hv = signmask;
414 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
415 *lv |= signmask << prec;
416 }
417 }
418
419 /* Shift the doubleword integer in L1, H1 right by COUNT places
420 keeping only PREC bits of result. COUNT must be positive.
421 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
422 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
423
424 void
425 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
426 HOST_WIDE_INT count, unsigned int prec,
427 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
428 int arith)
429 {
430 unsigned HOST_WIDE_INT signmask;
431
/* For an arithmetic shift, SIGNMASK is all ones when H1 is negative,
   else zero; a logical shift always fills with zeros.  */
432 signmask = (arith
433 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
434 : 0);
435
436 #ifdef SHIFT_COUNT_TRUNCATED
437 if (SHIFT_COUNT_TRUNCATED)
438 count %= prec;
439 #endif
440
441 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
442 {
443 /* Shifting by the host word size is undefined according to the
444 ANSI standard, so we must handle this as a special case. */
445 *hv = 0;
446 *lv = 0;
447 }
448 else if (count >= HOST_BITS_PER_WIDE_INT)
449 {
450 *hv = 0;
451 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
452 }
453 else
454 {
455 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
/* The two-step left shift (by WIDTH-COUNT-1, then by 1) moves the
   bits shifted out of H1 into *LV without ever shifting by a full
   host word, which would be undefined when COUNT == 0.  */
456 *lv = ((l1 >> count)
457 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
458 }
459
460 /* Zero / sign extend all bits that are beyond the precision. */
461
/* Only PREC - COUNT significant bits remain; everything above them is
   filled from SIGNMASK.  */
462 if (count >= (HOST_WIDE_INT)prec)
463 {
464 *hv = signmask;
465 *lv = signmask;
466 }
467 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
468 ;
469 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
470 {
471 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
472 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
473 }
474 else
475 {
476 *hv = signmask;
477 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
478 *lv |= signmask << (prec - count);
479 }
480 }
481 \f
482 /* Rotate the doubleword integer in L1, H1 left by COUNT places
483 keeping only PREC bits of result.
484 Rotate right if COUNT is negative.
485 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
486
487 void
488 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
489 HOST_WIDE_INT count, unsigned int prec,
490 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
491 {
492 unsigned HOST_WIDE_INT s1l, s2l;
493 HOST_WIDE_INT s1h, s2h;
494
495 count %= prec;
496 if (count < 0)
497 count += prec;
498
499 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
500 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
501 *lv = s1l | s2l;
502 *hv = s1h | s2h;
503 }
504
505 /* Rotate the doubleword integer in L1, H1 right by COUNT places
506 keeping only PREC bits of result. COUNT must be positive.
507 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
508
509 void
510 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
511 HOST_WIDE_INT count, unsigned int prec,
512 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
513 {
514 unsigned HOST_WIDE_INT s1l, s2l;
515 HOST_WIDE_INT s1h, s2h;
516
517 count %= prec;
518 if (count < 0)
519 count += prec;
520
521 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
522 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
523 *lv = s1l | s2l;
524 *hv = s1h | s2h;
525 }
526 \f
527 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
528 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
529 CODE is a tree code for a kind of division, one of
530 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
531 or EXACT_DIV_EXPR
532 It controls how the quotient is rounded to an integer.
533 Return nonzero if the operation overflows.
534 UNS nonzero says do unsigned division. */
535
536 int
537 div_and_round_double (enum tree_code code, int uns,
538 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
539 HOST_WIDE_INT hnum_orig,
540 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
541 HOST_WIDE_INT hden_orig,
542 unsigned HOST_WIDE_INT *lquo,
543 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
544 HOST_WIDE_INT *hrem)
545 {
546 int quo_neg = 0;
547 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
548 HOST_WIDE_INT den[4], quo[4];
549 int i, j;
550 unsigned HOST_WIDE_INT work;
551 unsigned HOST_WIDE_INT carry = 0;
552 unsigned HOST_WIDE_INT lnum = lnum_orig;
553 HOST_WIDE_INT hnum = hnum_orig;
554 unsigned HOST_WIDE_INT lden = lden_orig;
555 HOST_WIDE_INT hden = hden_orig;
556 int overflow = 0;
557
/* Division by zero: report overflow and divide by 1 instead so the
   routine still produces a well-defined result.  */
558 if (hden == 0 && lden == 0)
559 overflow = 1, lden = 1;
560
561 /* Calculate quotient sign and convert operands to unsigned. */
562 if (!uns)
563 {
564 if (hnum < 0)
565 {
566 quo_neg = ~ quo_neg;
567 /* (minimum integer) / (-1) is the only overflow case. */
568 if (neg_double (lnum, hnum, &lnum, &hnum)
569 && ((HOST_WIDE_INT) lden & hden) == -1)
570 overflow = 1;
571 }
572 if (hden < 0)
573 {
574 quo_neg = ~ quo_neg;
575 neg_double (lden, hden, &lden, &hden);
576 }
577 }
578
579 if (hnum == 0 && hden == 0)
580 { /* single precision */
581 *hquo = *hrem = 0;
582 /* This unsigned division rounds toward zero. */
583 *lquo = lnum / lden;
584 goto finish_up;
585 }
586
587 if (hnum == 0)
588 { /* trivial case: dividend < divisor */
589 /* hden != 0 already checked. */
590 *hquo = *lquo = 0;
591 *hrem = hnum;
592 *lrem = lnum;
593 goto finish_up;
594 }
595
596 memset (quo, 0, sizeof quo);
597
598 memset (num, 0, sizeof num); /* to zero 9th element */
599 memset (den, 0, sizeof den);
600
/* Break both operands into half-word digits for the long division.  */
601 encode (num, lnum, hnum);
602 encode (den, lden, hden);
603
604 /* Special code for when the divisor < BASE. */
/* Single-digit divisor: plain short division, most significant digit
   first, carrying the running remainder down.  */
605 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
606 {
607 /* hnum != 0 already checked. */
608 for (i = 4 - 1; i >= 0; i--)
609 {
610 work = num[i] + carry * BASE;
611 quo[i] = work / lden;
612 carry = work % lden;
613 }
614 }
615 else
616 {
617 /* Full double precision division,
618 with thanks to Don Knuth's "Seminumerical Algorithms". */
619 int num_hi_sig, den_hi_sig;
620 unsigned HOST_WIDE_INT quo_est, scale;
621
622 /* Find the highest nonzero divisor digit. */
623 for (i = 4 - 1;; i--)
624 if (den[i] != 0)
625 {
626 den_hi_sig = i;
627 break;
628 }
629
630 /* Ensure that the first digit of the divisor is at least BASE/2.
631 This is required by the quotient digit estimation algorithm. */
632
633 scale = BASE / (den[den_hi_sig] + 1);
634 if (scale > 1)
635 { /* scale divisor and dividend */
636 carry = 0;
637 for (i = 0; i <= 4 - 1; i++)
638 {
639 work = (num[i] * scale) + carry;
640 num[i] = LOWPART (work);
641 carry = HIGHPART (work);
642 }
643
644 num[4] = carry;
645 carry = 0;
646 for (i = 0; i <= 4 - 1; i++)
647 {
648 work = (den[i] * scale) + carry;
649 den[i] = LOWPART (work);
650 carry = HIGHPART (work);
651 if (den[i] != 0) den_hi_sig = i;
652 }
653 }
654
655 num_hi_sig = 4;
656
657 /* Main loop */
658 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
659 {
660 /* Guess the next quotient digit, quo_est, by dividing the first
661 two remaining dividend digits by the high order quotient digit.
662 quo_est is never low and is at most 2 high. */
663 unsigned HOST_WIDE_INT tmp;
664
665 num_hi_sig = i + den_hi_sig + 1;
666 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
667 if (num[num_hi_sig] != den[den_hi_sig])
668 quo_est = work / den[den_hi_sig];
669 else
670 quo_est = BASE - 1;
671
672 /* Refine quo_est so it's usually correct, and at most one high. */
673 tmp = work - quo_est * den[den_hi_sig];
674 if (tmp < BASE
675 && (den[den_hi_sig - 1] * quo_est
676 > (tmp * BASE + num[num_hi_sig - 2])))
677 quo_est--;
678
679 /* Try QUO_EST as the quotient digit, by multiplying the
680 divisor by QUO_EST and subtracting from the remaining dividend.
681 Keep in mind that QUO_EST is the I - 1st digit. */
682
683 carry = 0;
684 for (j = 0; j <= den_hi_sig; j++)
685 {
686 work = quo_est * den[j] + carry;
687 carry = HIGHPART (work);
688 work = num[i + j] - LOWPART (work);
689 num[i + j] = LOWPART (work);
690 carry += HIGHPART (work) != 0;
691 }
692
693 /* If quo_est was high by one, then num[i] went negative and
694 we need to correct things. */
695 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
696 {
697 quo_est--;
698 carry = 0; /* add divisor back in */
699 for (j = 0; j <= den_hi_sig; j++)
700 {
701 work = num[i + j] + den[j] + carry;
702 carry = HIGHPART (work);
703 num[i + j] = LOWPART (work);
704 }
705
706 num [num_hi_sig] += carry;
707 }
708
709 /* Store the quotient digit. */
710 quo[i] = quo_est;
711 }
712 }
713
714 decode (quo, lquo, hquo);
715
716 finish_up:
717 /* If result is negative, make it so. */
718 if (quo_neg)
719 neg_double (*lquo, *hquo, lquo, hquo);
720
721 /* compute trial remainder: rem = num - (quo * den) */
722 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
723 neg_double (*lrem, *hrem, lrem, hrem);
724 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
725
/* The quotient so far is truncated toward zero; adjust it by +/-1
   according to CODE's rounding mode, then recompute the remainder.  */
726 switch (code)
727 {
728 case TRUNC_DIV_EXPR:
729 case TRUNC_MOD_EXPR: /* round toward zero */
730 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
731 return overflow;
732
733 case FLOOR_DIV_EXPR:
734 case FLOOR_MOD_EXPR: /* round toward negative infinity */
735 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
736 {
737 /* quo = quo - 1; */
738 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
739 lquo, hquo);
740 }
741 else
742 return overflow;
743 break;
744
745 case CEIL_DIV_EXPR:
746 case CEIL_MOD_EXPR: /* round toward positive infinity */
747 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
748 {
749 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
750 lquo, hquo);
751 }
752 else
753 return overflow;
754 break;
755
756 case ROUND_DIV_EXPR:
757 case ROUND_MOD_EXPR: /* round to closest integer */
758 {
759 unsigned HOST_WIDE_INT labs_rem = *lrem;
760 HOST_WIDE_INT habs_rem = *hrem;
761 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
762 HOST_WIDE_INT habs_den = hden, htwice;
763
764 /* Get absolute values. */
765 if (*hrem < 0)
766 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
767 if (hden < 0)
768 neg_double (lden, hden, &labs_den, &habs_den);
769
770 /* If (2 * abs (lrem) >= abs (lden)) */
771 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
772 labs_rem, habs_rem, &ltwice, &htwice);
773
774 if (((unsigned HOST_WIDE_INT) habs_den
775 < (unsigned HOST_WIDE_INT) htwice)
776 || (((unsigned HOST_WIDE_INT) habs_den
777 == (unsigned HOST_WIDE_INT) htwice)
778 && (labs_den < ltwice)))
779 {
780 if (*hquo < 0)
781 /* quo = quo - 1; */
782 add_double (*lquo, *hquo,
783 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
784 else
785 /* quo = quo + 1; */
786 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
787 lquo, hquo);
788 }
789 else
790 return overflow;
791 }
792 break;
793
794 default:
795 abort ();
796 }
797
798 /* compute true remainder: rem = num - (quo * den) */
799 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
800 neg_double (*lrem, *hrem, lrem, hrem);
801 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
802 return overflow;
803 }
804 \f
805 /* Determine whether an expression T can be cheaply negated using
806 the function negate_expr. */
807
808 static bool
809 negate_expr_p (tree t)
810 {
811 unsigned HOST_WIDE_INT val;
812 unsigned int prec;
813 tree type;
814
815 if (t == 0)
816 return false;
817
818 type = TREE_TYPE (t);
819
820 STRIP_SIGN_NOPS (t);
821 switch (TREE_CODE (t))
822 {
823 case INTEGER_CST:
824 if (TREE_UNSIGNED (type))
825 return false;
826
827 /* Check that -CST will not overflow type. */
/* Extract the significant bits of the constant into VAL, masked to
   the type's precision.  */
828 prec = TYPE_PRECISION (type);
829 if (prec > HOST_BITS_PER_WIDE_INT)
830 {
831 if (TREE_INT_CST_LOW (t) != 0)
832 return true;
833 prec -= HOST_BITS_PER_WIDE_INT;
834 val = TREE_INT_CST_HIGH (t);
835 }
836 else
837 val = TREE_INT_CST_LOW (t);
838 if (prec < HOST_BITS_PER_WIDE_INT)
839 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
/* The one value whose negation overflows is the most negative one
   (only the sign bit set).  */
840 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
841
842 case REAL_CST:
843 case NEGATE_EXPR:
844 return true;
845
846 case MINUS_EXPR:
847 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
848 return ! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations;
849
850 case MULT_EXPR:
851 if (TREE_UNSIGNED (TREE_TYPE (t)))
852 break;
853
854 /* Fall through. */
855
856 case RDIV_EXPR:
/* -(A*B) can be done by negating either operand, so it is cheap if
   either operand is cheaply negatable.  */
857 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
858 return negate_expr_p (TREE_OPERAND (t, 1))
859 || negate_expr_p (TREE_OPERAND (t, 0));
860 break;
861
862 default:
863 break;
864 }
865 return false;
866 }
867
868 /* Given T, an expression, return the negation of T. Allow for T to be
869 null, in which case return null. */
870
871 static tree
872 negate_expr (tree t)
873 {
874 tree type;
875 tree tem;
876
877 if (t == 0)
878 return 0;
879
/* Remember the original type: STRIP_SIGN_NOPS may peel conversions,
   and every result below is converted back to TYPE.  */
880 type = TREE_TYPE (t);
881 STRIP_SIGN_NOPS (t);
882
883 switch (TREE_CODE (t))
884 {
885 case INTEGER_CST:
/* Fold the negation now; only use it if it did not overflow.  */
886 if (! TREE_UNSIGNED (type)
887 && 0 != (tem = fold (build1 (NEGATE_EXPR, type, t)))
888 && ! TREE_OVERFLOW (tem))
889 return tem;
890 break;
891
892 case REAL_CST:
893 tem = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (t)));
894 /* Two's complement FP formats, such as c4x, may overflow. */
895 if (! TREE_OVERFLOW (tem))
896 return convert (type, tem);
897 break;
898
899 case NEGATE_EXPR:
/* - -A -> A.  */
900 return convert (type, TREE_OPERAND (t, 0));
901
902 case MINUS_EXPR:
903 /* - (A - B) -> B - A */
904 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
905 return convert (type,
906 fold (build (MINUS_EXPR, TREE_TYPE (t),
907 TREE_OPERAND (t, 1),
908 TREE_OPERAND (t, 0))));
909 break;
910
911 case MULT_EXPR:
912 if (TREE_UNSIGNED (TREE_TYPE (t)))
913 break;
914
915 /* Fall through. */
916
917 case RDIV_EXPR:
/* Push the negation into whichever operand is cheaply negatable,
   preferring the second.  */
918 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
919 {
920 tem = TREE_OPERAND (t, 1);
921 if (negate_expr_p (tem))
922 return convert (type,
923 fold (build (TREE_CODE (t), TREE_TYPE (t),
924 TREE_OPERAND (t, 0),
925 negate_expr (tem))));
926 tem = TREE_OPERAND (t, 0);
927 if (negate_expr_p (tem))
928 return convert (type,
929 fold (build (TREE_CODE (t), TREE_TYPE (t),
930 negate_expr (tem),
931 TREE_OPERAND (t, 1))));
932 }
933 break;
934
935 default:
936 break;
937 }
938
/* Fallback: wrap T in an explicit NEGATE_EXPR.  */
939 return convert (type, fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t)));
940 }
941 \f
942 /* Split a tree IN into a constant, literal and variable parts that could be
943 combined with CODE to make IN. "constant" means an expression with
944 TREE_CONSTANT but that isn't an actual constant. CODE must be a
945 commutative arithmetic operation. Store the constant part into *CONP,
946 the literal in *LITP and return the variable part. If a part isn't
947 present, set it to null. If the tree does not decompose in this way,
948 return the entire tree as the variable part and the other parts as null.
949
950 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
951 case, we negate an operand that was subtracted. Except if it is a
952 literal for which we use *MINUS_LITP instead.
953
954 If NEGATE_P is true, we are negating all of IN, again except a literal
955 for which we use *MINUS_LITP instead.
956
957 If IN is itself a literal or constant, return it as appropriate.
958
959 Note that we do not guarantee that any of the three values will be the
960 same type as IN, but they will have the same signedness and mode. */
961
962 static tree
963 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
964 tree *minus_litp, int negate_p)
965 {
966 tree var = 0;
967
968 *conp = 0;
969 *litp = 0;
970 *minus_litp = 0;
971
972 /* Strip any conversions that don't change the machine mode or signedness. */
973 STRIP_SIGN_NOPS (in);
974
975 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
976 *litp = in;
977 else if (TREE_CODE (in) == code
978 || (! FLOAT_TYPE_P (TREE_TYPE (in))
979 /* We can associate addition and subtraction together (even
980 though the C standard doesn't say so) for integers because
981 the value is not affected. For reals, the value might be
982 affected, so we can't. */
983 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
984 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
985 {
986 tree op0 = TREE_OPERAND (in, 0);
987 tree op1 = TREE_OPERAND (in, 1);
/* NEG1_P records whether IN is a subtraction, in which case the part
   taken from operand 1 must be negated.  */
988 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
989 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
990
991 /* First see if either of the operands is a literal, then a constant. */
992 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
993 *litp = op0, op0 = 0;
994 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
995 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
996
997 if (op0 != 0 && TREE_CONSTANT (op0))
998 *conp = op0, op0 = 0;
999 else if (op1 != 0 && TREE_CONSTANT (op1))
1000 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1001
1002 /* If we haven't dealt with either operand, this is not a case we can
1003 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1004 if (op0 != 0 && op1 != 0)
1005 var = in;
1006 else if (op0 != 0)
1007 var = op0;
1008 else
1009 var = op1, neg_var_p = neg1_p;
1010
1011 /* Now do any needed negations. */
/* A negated literal goes into *MINUS_LITP rather than being rewritten;
   the constant and variable parts are negated in place.  */
1012 if (neg_litp_p)
1013 *minus_litp = *litp, *litp = 0;
1014 if (neg_conp_p)
1015 *conp = negate_expr (*conp);
1016 if (neg_var_p)
1017 var = negate_expr (var);
1018 }
1019 else if (TREE_CONSTANT (in))
1020 *conp = in;
1021 else
1022 var = in;
1023
/* Negating the whole decomposition swaps *LITP and *MINUS_LITP and
   negates the other two parts.  */
1024 if (negate_p)
1025 {
1026 if (*litp)
1027 *minus_litp = *litp, *litp = 0;
1028 else if (*minus_litp)
1029 *litp = *minus_litp, *minus_litp = 0;
1030 *conp = negate_expr (*conp);
1031 var = negate_expr (var);
1032 }
1033
1034 return var;
1035 }
1036
1037 /* Re-associate trees split by the above function. T1 and T2 are either
1038 expressions to associate or null. Return the new expression, if any. If
1039 we build an operation, do it in TYPE and with CODE. */
1040
1041 static tree
1042 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1043 {
1044 if (t1 == 0)
1045 return t2;
1046 else if (t2 == 0)
1047 return t1;
1048
1049 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1050 try to fold this since we will have infinite recursion. But do
1051 deal with any NEGATE_EXPRs. */
1052 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1053 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1054 {
1055 if (code == PLUS_EXPR)
1056 {
1057 if (TREE_CODE (t1) == NEGATE_EXPR)
1058 return build (MINUS_EXPR, type, convert (type, t2),
1059 convert (type, TREE_OPERAND (t1, 0)));
1060 else if (TREE_CODE (t2) == NEGATE_EXPR)
1061 return build (MINUS_EXPR, type, convert (type, t1),
1062 convert (type, TREE_OPERAND (t2, 0)));
1063 }
1064 return build (code, type, convert (type, t1), convert (type, t2));
1065 }
1066
1067 return fold (build (code, type, convert (type, t1), convert (type, t2)));
1068 }
1069 \f
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  The result has the type of ARG1.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Each operand and the result is a double-word integer:
     a low word and a high word.  */
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TREE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;
  int no_overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      /* A right shift is a left shift by the negated count.  */
      int2l = -int2l;
      /* ... fall through ...  */
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		     &low, &hi, !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      /* A right rotate is a left rotate by the negated count.  */
      int2l = - int2l;
      /* ... fall through ...  */
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      /* Subtraction is addition of the negated second operand.  */
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case: both operands
	 nonnegative, single-word, and overflow-free.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    int1l += int2l - 1;

	  low = int1l / int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
	{
	  /* Dividing by one returns the dividend unchanged.  */
	  low = int1l, hi = int1h;
	  break;
	}
      if (int1l == int2l && int1h == int2h
	  && ! (int1l == 0 && int1h == 0))
	{
	  /* Dividing a nonzero value by itself gives one.  */
	  low = 1, hi = 0;
	  break;
	}
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
				       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    int1l += int2l - 1;
	  low = int1l % int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      /* The quotient words are discarded; only the remainder is kept.  */
      overflow = div_and_round_double (code, uns,
				       int1l, int1h, int2l, int2h,
				       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      /* LOW temporarily holds the result of the comparison ARG1 < ARG2.  */
      if (uns)
	low = (((unsigned HOST_WIDE_INT) int1h
		< (unsigned HOST_WIDE_INT) int2h)
	       || (((unsigned HOST_WIDE_INT) int1h
		    == (unsigned HOST_WIDE_INT) int2h)
		   && int1l < int2l));
      else
	low = (int1h < int2h
	       || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
	low = int1l, hi = int1h;
      else
	low = int2l, hi = int2h;
      break;

    default:
      abort ();
    }

  /* If this is for a sizetype, can be represented as one (signed)
     HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
     constants.  */
  if (is_sizetype
      && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
	  || (hi == -1 && (HOST_WIDE_INT) low < 0))
      && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
    return size_int_type_wide (low, type);
  else
    {
      t = build_int_2 (low, hi);
      TREE_TYPE (t) = TREE_TYPE (arg1);
    }

  /* An overflow in the operation, or one inherited from either operand,
     is recorded on the result; with NOTRUNC clear, force_fit_type may
     also detect an overflow while truncating to the type.  */
  TREE_OVERFLOW (t)
    = ((notrunc
	? (!uns || is_sizetype) && overflow
	: (force_fit_type (t, (!uns || is_sizetype) && overflow)
	   && ! no_overflow))
       | TREE_OVERFLOW (arg1)
       | TREE_OVERFLOW (arg2));

  /* If we're doing a size calculation, unsigned arithmetic does overflow.
     So check if force_fit_type truncated the value.  */
  if (is_sizetype
      && ! TREE_OVERFLOW (t)
      && (TREE_INT_CST_HIGH (t) != hi
	  || TREE_INT_CST_LOW (t) != low))
    TREE_OVERFLOW (t) = 1;

  TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
				| TREE_CONSTANT_OVERFLOW (arg1)
				| TREE_CONSTANT_OVERFLOW (arg2));
  return t;
}
1256
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return null if
   the combination cannot or should not be folded.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* Integer constants are handled by the double-word integer routine.  */
  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      REAL_ARITHMETIC (value, code, d1, d2);

      /* Truncate the exact result to the precision of MODE.  */
      t = build_real (type, real_value_truncate (mode, value));

      TREE_OVERFLOW (t)
	= (force_fit_type (t, 0)
	   | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
      TREE_CONSTANT_OVERFLOW (t)
	= TREE_OVERFLOW (t)
	  | TREE_CONSTANT_OVERFLOW (arg1)
	  | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
	{
	case PLUS_EXPR:
	  t = build_complex (type,
			     const_binop (PLUS_EXPR, r1, r2, notrunc),
			     const_binop (PLUS_EXPR, i1, i2, notrunc));
	  break;

	case MINUS_EXPR:
	  t = build_complex (type,
			     const_binop (MINUS_EXPR, r1, r2, notrunc),
			     const_binop (MINUS_EXPR, i1, i2, notrunc));
	  break;

	case MULT_EXPR:
	  /* (r1 + i1*i) * (r2 + i2*i)
	     = (r1*r2 - i1*i2) + (r1*i2 + i1*r2)*i.  */
	  t = build_complex (type,
			     const_binop (MINUS_EXPR,
					  const_binop (MULT_EXPR,
						       r1, r2, notrunc),
					  const_binop (MULT_EXPR,
						       i1, i2, notrunc),
					  notrunc),
			     const_binop (PLUS_EXPR,
					  const_binop (MULT_EXPR,
						       r1, i2, notrunc),
					  const_binop (MULT_EXPR,
						       i1, r2, notrunc),
					  notrunc));
	  break;

	case RDIV_EXPR:
	  {
	    /* Divide by multiplying with the conjugate and scaling
	       by |r2 + i2*i|^2 = r2*r2 + i2*i2.  */
	    tree magsquared
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r2, r2, notrunc),
			     const_binop (MULT_EXPR, i2, i2, notrunc),
			     notrunc);

	    t = build_complex (type,
			       const_binop
			       (INTEGRAL_TYPE_P (TREE_TYPE (r1))
				? TRUNC_DIV_EXPR : RDIV_EXPR,
				const_binop (PLUS_EXPR,
					     const_binop (MULT_EXPR, r1, r2,
							  notrunc),
					     const_binop (MULT_EXPR, i1, i2,
							  notrunc),
					     notrunc),
				magsquared, notrunc),
			       const_binop
			       (INTEGRAL_TYPE_P (TREE_TYPE (r1))
				? TRUNC_DIV_EXPR : RDIV_EXPR,
				const_binop (MINUS_EXPR,
					     const_binop (MULT_EXPR, i1, r2,
							  notrunc),
					     const_binop (MULT_EXPR, r1, i2,
							  notrunc),
					     notrunc),
				magsquared, notrunc));
	  }
	  break;

	default:
	  abort ();
	}
      return t;
    }
  /* Other kinds of constant are not folded here.  */
  return 0;
}
1397
1398 /* These are the hash table functions for the hash table of INTEGER_CST
1399 nodes of a sizetype. */
1400
1401 /* Return the hash code code X, an INTEGER_CST. */
1402
1403 static hashval_t
1404 size_htab_hash (const void *x)
1405 {
1406 tree t = (tree) x;
1407
1408 return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
1409 ^ htab_hash_pointer (TREE_TYPE (t))
1410 ^ (TREE_OVERFLOW (t) << 20));
1411 }
1412
1413 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1414 is the same as that given by *Y, which is the same. */
1415
1416 static int
1417 size_htab_eq (const void *x, const void *y)
1418 {
1419 tree xt = (tree) x;
1420 tree yt = (tree) y;
1421
1422 return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
1423 && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
1424 && TREE_TYPE (xt) == TREE_TYPE (yt)
1425 && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
1426 }
1427 \f
1428 /* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
1429 bits are given by NUMBER and of the sizetype represented by KIND. */
1430
1431 tree
1432 size_int_wide (HOST_WIDE_INT number, enum size_type_kind kind)
1433 {
1434 return size_int_type_wide (number, sizetype_tab[(int) kind]);
1435 }
1436
/* Likewise, but the desired type is specified explicitly.  */

/* Scratch INTEGER_CST node; replaced with a fresh node whenever it gets
   entered into the hash table.  Both roots are registered with the
   garbage collector via GTY.  */
static GTY (()) tree new_const;
static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
  htab_t size_htab;

tree
size_int_type_wide (HOST_WIDE_INT number, tree type)
{
  void **slot;

  /* Create the hash table and the scratch node lazily on first use.  */
  if (size_htab == 0)
    {
      size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
      new_const = make_node (INTEGER_CST);
    }

  /* Adjust NEW_CONST to be the constant we want.  If it's already in the
     hash table, we return the value from the hash table.  Otherwise, we
     place that in the hash table and make a new node for the next time.  */
  TREE_INT_CST_LOW (new_const) = number;
  /* Sign-extend NUMBER into the high word.  */
  TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
  TREE_TYPE (new_const) = type;
  TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
    = force_fit_type (new_const, 0);

  slot = htab_find_slot (size_htab, new_const, INSERT);
  if (*slot == 0)
    {
      tree t = new_const;

      /* The scratch node is now owned by the table; allocate another.  */
      *slot = new_const;
      new_const = make_node (INTEGER_CST);
      return t;
    }
  else
    return (tree) *slot;
}
1475
1476 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1477 is a tree code. The type of the result is taken from the operands.
1478 Both must be the same type integer type and it must be a size type.
1479 If the operands are constant, so is the result. */
1480
1481 tree
1482 size_binop (enum tree_code code, tree arg0, tree arg1)
1483 {
1484 tree type = TREE_TYPE (arg0);
1485
1486 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1487 || type != TREE_TYPE (arg1))
1488 abort ();
1489
1490 /* Handle the special case of two integer constants faster. */
1491 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1492 {
1493 /* And some specific cases even faster than that. */
1494 if (code == PLUS_EXPR && integer_zerop (arg0))
1495 return arg1;
1496 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1497 && integer_zerop (arg1))
1498 return arg0;
1499 else if (code == MULT_EXPR && integer_onep (arg0))
1500 return arg1;
1501
1502 /* Handle general case of two integer constants. */
1503 return int_const_binop (code, arg0, arg1, 0);
1504 }
1505
1506 if (arg0 == error_mark_node || arg1 == error_mark_node)
1507 return error_mark_node;
1508
1509 return fold (build (code, type, arg0, arg1));
1510 }
1511
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  /* Both arguments must share a single integer size type.  */
  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* If the type is already signed, just do the simple thing.  */
  if (! TREE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  /* Pick the signed counterpart of the operands' size type.  */
  ctype = (type == bitsizetype || type == ubitsizetype
	   ? sbitsizetype : ssizetype);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, convert (ctype, arg0),
		       convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, convert (ctype, integer_zero_node),
		       convert (ctype, size_binop (MINUS_EXPR, arg1, arg0)));
}
1552 \f
1553
/* Given T, a tree representing type conversion of ARG1, a constant,
   return a constant tree representing the result of conversion.
   If the conversion cannot be folded, T itself is returned unchanged.  */

static tree
fold_convert (tree t, tree arg1)
{
  tree type = TREE_TYPE (t);
  int overflow = 0;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  /* If we would build a constant wider than GCC supports,
	     leave the conversion unfolded.  */
	  if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
	    return t;

	  /* If we are trying to make a sizetype for a small integer, use
	     size_int to pick up cached types to reduce duplicate nodes.  */
	  if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
	      && !TREE_CONSTANT_OVERFLOW (arg1)
	      && compare_tree_int (arg1, 10000) < 0)
	    return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);

	  /* Given an integer constant, make new constant with new type,
	     appropriately sign-extended or truncated.  */
	  t = build_int_2 (TREE_INT_CST_LOW (arg1),
			   TREE_INT_CST_HIGH (arg1));
	  TREE_TYPE (t) = type;
	  /* Indicate an overflow if (1) ARG1 already overflowed,
	     or (2) force_fit_type indicates an overflow.
	     Tell force_fit_type that an overflow has already occurred
	     if ARG1 is a too-large unsigned value and T is signed.
	     But don't indicate an overflow if converting a pointer.  */
	  TREE_OVERFLOW (t)
	    = ((force_fit_type (t,
				(TREE_INT_CST_HIGH (arg1) < 0
				 && (TREE_UNSIGNED (type)
				     < TREE_UNSIGNED (TREE_TYPE (arg1)))))
		&& ! POINTER_TYPE_P (TREE_TYPE (arg1)))
	       || TREE_OVERFLOW (arg1));
	  TREE_CONSTANT_OVERFLOW (t)
	    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
	}
      else if (TREE_CODE (arg1) == REAL_CST)
	{
	  /* Don't initialize these, use assignments.
	     Initialized local aggregates don't work on old compilers.  */
	  REAL_VALUE_TYPE x;
	  REAL_VALUE_TYPE l;
	  REAL_VALUE_TYPE u;
	  tree type1 = TREE_TYPE (arg1);
	  int no_upper_bound;

	  x = TREE_REAL_CST (arg1);
	  l = real_value_from_int_cst (type1, TYPE_MIN_VALUE (type));

	  no_upper_bound = (TYPE_MAX_VALUE (type) == NULL);
	  if (!no_upper_bound)
	    u = real_value_from_int_cst (type1, TYPE_MAX_VALUE (type));

	  /* See if X will be in range after truncation towards 0.
	     To compensate for truncation, move the bounds away from 0,
	     but reject if X exactly equals the adjusted bounds.  */
	  REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
	  if (!no_upper_bound)
	    REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
	  /* If X is a NaN, use zero instead and show we have an overflow.
	     Otherwise, range check.  */
	  if (REAL_VALUE_ISNAN (x))
	    overflow = 1, x = dconst0;
	  else if (! (REAL_VALUES_LESS (l, x)
		      && !no_upper_bound
		      && REAL_VALUES_LESS (x, u)))
	    /* NOTE(review): when NO_UPPER_BOUND is set, this condition is
	       always true, so OVERFLOW is set for every such conversion —
	       confirm that is the intended behavior.  */
	    overflow = 1;

	  {
	    HOST_WIDE_INT low, high;
	    REAL_VALUE_TO_INT (&low, &high, x);
	    t = build_int_2 (low, high);
	  }
	  TREE_TYPE (t) = type;
	  TREE_OVERFLOW (t)
	    = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
	  TREE_CONSTANT_OVERFLOW (t)
	    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
	}
      TREE_TYPE (t) = type;
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
	{
	  if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
	    {
	      /* We make a copy of ARG1 so that we don't modify an
		 existing constant tree.  */
	      t = copy_node (arg1);
	      TREE_TYPE (t) = type;
	      return t;
	    }

	  t = build_real (type,
			  real_value_truncate (TYPE_MODE (type),
					       TREE_REAL_CST (arg1)));

	  TREE_OVERFLOW (t)
	    = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
	  TREE_CONSTANT_OVERFLOW (t)
	    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
	  return t;
	}
    }
  TREE_CONSTANT (t) = 1;
  return t;
}
1673 \f
1674 /* Return an expr equal to X but certainly not valid as an lvalue. */
1675
1676 tree
1677 non_lvalue (tree x)
1678 {
1679 tree result;
1680
1681 /* These things are certainly not lvalues. */
1682 if (TREE_CODE (x) == NON_LVALUE_EXPR
1683 || TREE_CODE (x) == INTEGER_CST
1684 || TREE_CODE (x) == REAL_CST
1685 || TREE_CODE (x) == STRING_CST
1686 || TREE_CODE (x) == ADDR_EXPR)
1687 return x;
1688
1689 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
1690 TREE_CONSTANT (result) = TREE_CONSTANT (x);
1691 return result;
1692 }
1693
1694 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
1695 Zero means allow extended lvalues. */
1696
1697 int pedantic_lvalues;
1698
1699 /* When pedantic, return an expr equal to X but certainly not valid as a
1700 pedantic lvalue. Otherwise, return X. */
1701
1702 tree
1703 pedantic_non_lvalue (tree x)
1704 {
1705 if (pedantic_lvalues)
1706 return non_lvalue (x);
1707 else
1708 return x;
1709 }
1710 \f
1711 /* Given a tree comparison code, return the code that is the logical inverse
1712 of the given code. It is not safe to do this for floating-point
1713 comparisons, except for NE_EXPR and EQ_EXPR. */
1714
1715 static enum tree_code
1716 invert_tree_comparison (enum tree_code code)
1717 {
1718 switch (code)
1719 {
1720 case EQ_EXPR:
1721 return NE_EXPR;
1722 case NE_EXPR:
1723 return EQ_EXPR;
1724 case GT_EXPR:
1725 return LE_EXPR;
1726 case GE_EXPR:
1727 return LT_EXPR;
1728 case LT_EXPR:
1729 return GE_EXPR;
1730 case LE_EXPR:
1731 return GT_EXPR;
1732 default:
1733 abort ();
1734 }
1735 }
1736
1737 /* Similar, but return the comparison that results if the operands are
1738 swapped. This is safe for floating-point. */
1739
1740 static enum tree_code
1741 swap_tree_comparison (enum tree_code code)
1742 {
1743 switch (code)
1744 {
1745 case EQ_EXPR:
1746 case NE_EXPR:
1747 return code;
1748 case GT_EXPR:
1749 return LT_EXPR;
1750 case GE_EXPR:
1751 return LE_EXPR;
1752 case LT_EXPR:
1753 return GT_EXPR;
1754 case LE_EXPR:
1755 return GE_EXPR;
1756 default:
1757 abort ();
1758 }
1759 }
1760
1761
1762 /* Convert a comparison tree code from an enum tree_code representation
1763 into a compcode bit-based encoding. This function is the inverse of
1764 compcode_to_comparison. */
1765
1766 static int
1767 comparison_to_compcode (enum tree_code code)
1768 {
1769 switch (code)
1770 {
1771 case LT_EXPR:
1772 return COMPCODE_LT;
1773 case EQ_EXPR:
1774 return COMPCODE_EQ;
1775 case LE_EXPR:
1776 return COMPCODE_LE;
1777 case GT_EXPR:
1778 return COMPCODE_GT;
1779 case NE_EXPR:
1780 return COMPCODE_NE;
1781 case GE_EXPR:
1782 return COMPCODE_GE;
1783 default:
1784 abort ();
1785 }
1786 }
1787
1788 /* Convert a compcode bit-based encoding of a comparison operator back
1789 to GCC's enum tree_code representation. This function is the
1790 inverse of comparison_to_compcode. */
1791
1792 static enum tree_code
1793 compcode_to_comparison (int code)
1794 {
1795 switch (code)
1796 {
1797 case COMPCODE_LT:
1798 return LT_EXPR;
1799 case COMPCODE_EQ:
1800 return EQ_EXPR;
1801 case COMPCODE_LE:
1802 return LE_EXPR;
1803 case COMPCODE_GT:
1804 return GT_EXPR;
1805 case COMPCODE_NE:
1806 return NE_EXPR;
1807 case COMPCODE_GE:
1808 return GE_EXPR;
1809 default:
1810 abort ();
1811 }
1812 }
1813
1814 /* Return nonzero if CODE is a tree code that represents a truth value. */
1815
1816 static int
1817 truth_value_p (enum tree_code code)
1818 {
1819 return (TREE_CODE_CLASS (code) == '<'
1820 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
1821 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
1822 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
1823 }
1824 \f
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.

   If ONLY_CONST is nonzero, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If ONLY_CONST is zero, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence a zero value for
   ONLY_CONST assumes isochronic (or instantaneous) tree equivalence.
   If comparing arbitrary expression trees, such as from different
   statements, ONLY_CONST must usually be nonzero.  */

int
operand_equal_p (tree arg0, tree arg1, int only_const)
{
  tree fndecl;

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  */
  if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
	 Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us. In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! only_const
      && (TREE_CODE (arg0) == SAVE_EXPR
	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
	return (! TREE_CONSTANT_OVERFLOW (arg0)
		&& ! TREE_CONSTANT_OVERFLOW (arg1)
		&& tree_int_cst_equal (arg0, arg1));

      case REAL_CST:
	return (! TREE_CONSTANT_OVERFLOW (arg0)
		&& ! TREE_CONSTANT_OVERFLOW (arg1)
		&& REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
					  TREE_REAL_CST (arg1)));

      case VECTOR_CST:
	{
	  tree v1, v2;

	  if (TREE_CONSTANT_OVERFLOW (arg0)
	      || TREE_CONSTANT_OVERFLOW (arg1))
	    return 0;

	  /* Walk the two element chains in parallel.
	     NOTE(review): V1/V2 are the TREE_LIST links themselves;
	     presumably the intent is to compare TREE_VALUE (v1) with
	     TREE_VALUE (v2) — confirm against the TREE_VECTOR_CST_ELTS
	     representation.  */
	  v1 = TREE_VECTOR_CST_ELTS (arg0);
	  v2 = TREE_VECTOR_CST_ELTS (arg1);
	  while (v1 && v2)
	    {
	      if (!operand_equal_p (v1, v2, only_const))
		return 0;
	      v1 = TREE_CHAIN (v1);
	      v2 = TREE_CHAIN (v2);
	    }

	  return 1;
	}

      case COMPLEX_CST:
	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
				 only_const)
		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
				    only_const));

      case STRING_CST:
	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
		&& ! memcmp (TREE_STRING_POINTER (arg0),
			     TREE_STRING_POINTER (arg1),
			     TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
				0);
      default:
	break;
      }

  if (only_const)
    return 0;

  /* Structural comparison, dispatching on the tree code class.  */
  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case '1':
      /* Two conversions are equal only if signedness and modes match.  */
      if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
	  && (TREE_UNSIGNED (TREE_TYPE (arg0))
	      != TREE_UNSIGNED (TREE_TYPE (arg1))))
	return 0;

      return operand_equal_p (TREE_OPERAND (arg0, 0),
			      TREE_OPERAND (arg1, 0), 0);

    case '<':
    case '2':
      if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
	  && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
			      0))
	return 1;

      /* For commutative ops, allow the other order.  */
      return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
	       || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
	       || TREE_CODE (arg0) == BIT_IOR_EXPR
	       || TREE_CODE (arg0) == BIT_XOR_EXPR
	       || TREE_CODE (arg0) == BIT_AND_EXPR
	       || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 0), 0));

    case 'r':
      /* If either of the pointer (or reference) expressions we are
	 dereferencing contain a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
	  || TREE_SIDE_EFFECTS (arg1))
	return 0;

      switch (TREE_CODE (arg0))
	{
	case INDIRECT_REF:
	  return operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), 0);

	case COMPONENT_REF:
	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 0), 0)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 1), 0));

	case BIT_FIELD_REF:
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 0), 0)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 1), 0)
		  && operand_equal_p (TREE_OPERAND (arg0, 2),
				      TREE_OPERAND (arg1, 2), 0));
	default:
	  return 0;
	}

    case 'e':
      switch (TREE_CODE (arg0))
	{
	case ADDR_EXPR:
	case TRUTH_NOT_EXPR:
	  return operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), 0);

	case RTL_EXPR:
	  return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));

	case CALL_EXPR:
	  /* If the CALL_EXPRs call different functions, then they
	     clearly can not be equal.  */
	  if (! operand_equal_p (TREE_OPERAND (arg0, 0),
				 TREE_OPERAND (arg1, 0), 0))
	    return 0;

	  /* Only consider const functions equivalent.  */
	  fndecl = get_callee_fndecl (arg0);
	  if (fndecl == NULL_TREE
	      || ! (flags_from_decl_or_type (fndecl) & ECF_CONST))
	    return 0;

	  /* Now see if all the arguments are the same.  operand_equal_p
	     does not handle TREE_LIST, so we walk the operands here
	     feeding them to operand_equal_p.  */
	  arg0 = TREE_OPERAND (arg0, 1);
	  arg1 = TREE_OPERAND (arg1, 1);
	  while (arg0 && arg1)
	    {
	      if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1), 0))
		return 0;

	      arg0 = TREE_CHAIN (arg0);
	      arg1 = TREE_CHAIN (arg1);
	    }

	  /* If we get here and both argument lists are exhausted
	     then the CALL_EXPRs are equal.  */
	  return ! (arg0 || arg1);

	default:
	  return 0;
	}

    case 'd':
      /* Consider __builtin_sqrt equal to sqrt.  */
      return TREE_CODE (arg0) == FUNCTION_DECL
	     && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
	     && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
	     && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1);

    default:
      return 0;
    }
}
2060 \f
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  /* Only integral operands can have been shortened.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
	 to match the longer operand.  */
      primarg1 = convert ((*lang_hooks.types.signed_or_unsigned_type)
			  (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, convert (type, primarg1), 0))
	return 1;
    }

  return 0;
}
2116 \f
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  char class = TREE_CODE_CLASS (code);

  /* We can handle some of the 'e' cases here by treating them like the
     unary or binary class they behave as.  */
  if (class == 'e' && code == TRUTH_NOT_EXPR)
    class = '1';
  else if (class == 'e'
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
	       || code == COMPOUND_EXPR))
    class = '2';

  else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
	 too complex to handle.  */
      if (*cval1 || *cval2)
	return 0;

      class = '1';
      *save_p = 1;
    }

  switch (class)
    {
    case '1':
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case '2':
      /* Both operands must themselves qualify.  */
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
				      cval1, cval2, save_p));

    case 'c':
      /* Constants are always acceptable.  */
      return 1;

    case 'e':
      /* A conditional qualifies only if all three operands do.  */
      if (code == COND_EXPR)
	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
				     cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
					cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
					cval1, cval2, save_p));
      return 0;

    case '<':
      /* First see if we can handle the first operand, then the second.  For
	 the second operand, we know *CVAL1 can't be zero.  It must be that
	 one side of the comparison is each of the values; test for the
	 case where this isn't true by failing if the two operands
	 are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
			   TREE_OPERAND (arg, 1), 0))
	return 0;

      /* Record or match the first operand against the saved values.  */
      if (*cval1 == 0)
	*cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
	;
      else
	return 0;

      /* Likewise for the second operand.  */
      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
	;
      else
	return 0;

      return 1;

    default:
      return 0;
    }
}
2213 \f
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  char class = TREE_CODE_CLASS (code);

  /* We can handle some of the 'e' cases here by treating them like the
     unary or binary class they behave as.  */
  if (class == 'e' && code == TRUTH_NOT_EXPR)
    class = '1';
  else if (class == 'e'
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    class = '2';

  switch (class)
    {
    case '1':
      /* Rebuild the unary node around the substituted operand.  */
      return fold (build1 (code, type,
			   eval_subst (TREE_OPERAND (arg, 0),
				       old0, new0, old1, new1)));

    case '2':
      /* Rebuild the binary node, substituting in both operands.  */
      return fold (build (code, type,
			  eval_subst (TREE_OPERAND (arg, 0),
				      old0, new0, old1, new1),
			  eval_subst (TREE_OPERAND (arg, 1),
				      old0, new0, old1, new1)));

    case 'e':
      switch (code)
	{
	case SAVE_EXPR:
	  /* Substitute within the saved expression itself.  */
	  return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);

	case COMPOUND_EXPR:
	  /* Only the second operand supplies the value, so substitute
	     there and discard the first.  */
	  return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);

	case COND_EXPR:
	  /* Substitute in the condition and both arms.  */
	  return fold (build (code, type,
			      eval_subst (TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1),
			      eval_subst (TREE_OPERAND (arg, 1),
					  old0, new0, old1, new1),
			      eval_subst (TREE_OPERAND (arg, 2),
					  old0, new0, old1, new1)));
	default:
	  break;
	}
      /* Fall through - ???  Other 'e' codes get the comparison-style
	 operand substitution below.  */

    case '<':
      {
	tree arg0 = TREE_OPERAND (arg, 0);
	tree arg1 = TREE_OPERAND (arg, 1);

	/* We need to check both for exact equality and tree equality.  The
	   former will be true if the operand has a side-effect.  In that
	   case, we know the operand occurred exactly once.  */

	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
	  arg0 = new0;
	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
	  arg0 = new1;

	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
	  arg1 = new0;
	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
	  arg1 = new1;

	return fold (build (code, type, arg0, arg1));
      }

    default:
      /* Anything else (e.g. a constant) is returned unchanged.  */
      return arg;
    }
}
2295 \f
2296 /* Return a tree for the case when the result of an expression is RESULT
2297 converted to TYPE and OMITTED was previously an operand of the expression
2298 but is now not needed (e.g., we folded OMITTED * 0).
2299
2300 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2301 the conversion of RESULT to TYPE. */
2302
2303 tree
2304 omit_one_operand (tree type, tree result, tree omitted)
2305 {
2306 tree t = convert (type, result);
2307
2308 if (TREE_SIDE_EFFECTS (omitted))
2309 return build (COMPOUND_EXPR, type, omitted, t);
2310
2311 return non_lvalue (t);
2312 }
2313
2314 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2315
2316 static tree
2317 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2318 {
2319 tree t = convert (type, result);
2320
2321 if (TREE_SIDE_EFFECTS (omitted))
2322 return build (COMPOUND_EXPR, type, omitted, t);
2323
2324 return pedantic_non_lvalue (t);
2325 }
2326 \f
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).  */

tree
invert_truthvalue (tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);

  if (code == ERROR_MARK)
    return arg;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == '<')
    {
      if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
	  && !flag_unsafe_math_optimizations
	  && code != NE_EXPR
	  && code != EQ_EXPR)
	return build1 (TRUTH_NOT_EXPR, type, arg);
      else
	return build (invert_tree_comparison (code), type,
		      TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
    }

  switch (code)
    {
    case INTEGER_CST:
      /* A constant folds to 1 if it was zero, 0 otherwise.  */
      return convert (type, build_int_2 (integer_zerop (arg), 0));

    case TRUTH_AND_EXPR:
      /* De Morgan: !(a & b) == !a | !b.  */
      return build (TRUTH_OR_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      /* De Morgan: !(a | b) == !a & !b.  */
      return build (TRUTH_AND_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
	 unless the second operand is a TRUTH_NOT_EXPR in which case our
	 result is the XOR of the first operand with the inside of the
	 negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
	return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
		      TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
	return build (TRUTH_XOR_EXPR, type,
		      invert_truthvalue (TREE_OPERAND (arg, 0)),
		      TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      /* De Morgan for the short-circuit forms.  */
      return build (TRUTH_ORIF_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      return build (TRUTH_ANDIF_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      /* Double negation cancels.  */
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      /* Negate both arms; the condition is unchanged.  */
      return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
		    invert_truthvalue (TREE_OPERAND (arg, 1)),
		    invert_truthvalue (TREE_OPERAND (arg, 2)));

    case COMPOUND_EXPR:
      /* Only the second operand carries the truth value.  */
      return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case WITH_RECORD_EXPR:
      return build (WITH_RECORD_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    TREE_OPERAND (arg, 1));

    case NON_LVALUE_EXPR:
      return invert_truthvalue (TREE_OPERAND (arg, 0));

    case NOP_EXPR:
    case CONVERT_EXPR:
    case FLOAT_EXPR:
      /* Push the negation inside the conversion.  */
      return build1 (TREE_CODE (arg), type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      /* Only handle the single-bit case X & 1; then !X == (X == 0).  */
      if (!integer_onep (TREE_OPERAND (arg, 1)))
	break;
      return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));

    case SAVE_EXPR:
      return build1 (TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      return build1 (CLEANUP_POINT_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)));

    default:
      break;
    }
  /* Anything else must already be a boolean to be safely wrapped in a
     TRUTH_NOT_EXPR.  */
  if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
    abort ();
  return build1 (TRUTH_NOT_EXPR, type, arg);
}
2440
2441 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2442 operands are another bit-wise operation with a common input. If so,
2443 distribute the bit operations to save an operation and possibly two if
2444 constants are involved. For example, convert
2445 (A | B) & (A | C) into A | (B & C)
2446 Further simplification will occur if B and C are constants.
2447
2448 If this optimization cannot be done, 0 will be returned. */
2449
2450 static tree
2451 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
2452 {
2453 tree common;
2454 tree left, right;
2455
2456 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2457 || TREE_CODE (arg0) == code
2458 || (TREE_CODE (arg0) != BIT_AND_EXPR
2459 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2460 return 0;
2461
2462 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2463 {
2464 common = TREE_OPERAND (arg0, 0);
2465 left = TREE_OPERAND (arg0, 1);
2466 right = TREE_OPERAND (arg1, 1);
2467 }
2468 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2469 {
2470 common = TREE_OPERAND (arg0, 0);
2471 left = TREE_OPERAND (arg0, 1);
2472 right = TREE_OPERAND (arg1, 0);
2473 }
2474 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2475 {
2476 common = TREE_OPERAND (arg0, 1);
2477 left = TREE_OPERAND (arg0, 0);
2478 right = TREE_OPERAND (arg1, 1);
2479 }
2480 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2481 {
2482 common = TREE_OPERAND (arg0, 1);
2483 left = TREE_OPERAND (arg0, 0);
2484 right = TREE_OPERAND (arg1, 0);
2485 }
2486 else
2487 return 0;
2488
2489 return fold (build (TREE_CODE (arg0), type, common,
2490 fold (build (code, type, left, right))));
2491 }
2492 \f
2493 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2494 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
2495
2496 static tree
2497 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
2498 int unsignedp)
2499 {
2500 tree result = build (BIT_FIELD_REF, type, inner,
2501 size_int (bitsize), bitsize_int (bitpos));
2502
2503 TREE_UNSIGNED (result) = unsignedp;
2504
2505 return result;
2506 }
2507
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (enum tree_code code, tree compare_type,
			    tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit
     size is the same as the size of the underlying object, we aren't doing
     an extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
				&lunsignedp, &lvolatilep);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

 if (!const_p)
   {
     /* If this is not a constant, we can only do something if bit positions,
	sizes, and signedness are the same.  */
     rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
				   &runsignedp, &rvolatilep);

     if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
	 || lunsignedp != runsignedp || offset != 0
	 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
       return 0;
   }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos,
			 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
			 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
				TYPE_ALIGN (TREE_TYPE (rinner))),
			 word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = (*lang_hooks.types.type_for_mode) (nmode, 0);
  unsigned_type = (*lang_hooks.types.type_for_mode) (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field: an all-ones
     value shifted so only the field's bits survive.  */
  mask = build_int_2 (~0, ~0);
  TREE_TYPE (mask) = unsigned_type;
  force_fit_type (mask, 0);
  mask = convert (unsigned_type, mask);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask,
		      size_int (nbitsize - lbitsize - lbitpos), 0);

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return build (code, compare_type,
		  build (BIT_AND_EXPR, unsigned_type,
			 make_bit_field_ref (linner, unsigned_type,
					     nbitsize, nbitpos, 1),
			 mask),
		  build (BIT_AND_EXPR, unsigned_type,
			 make_bit_field_ref (rinner, unsigned_type,
					     nbitsize, nbitpos, 1),
			 mask));

  /* Otherwise, we are handling the constant case.  See if the constant is
     too big for the field.  If so, warn and return a tree for 0 (false).
     We do this not only for its own sake, but to avoid having to test for
     this error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length
     should be all zero.  For signed fields, the high-order bits should
     agree with the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
					convert (unsigned_type, rhs),
					size_int (lbitsize), 0)))
	{
	  warning ("comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return convert (compare_type,
			  (code == NE_EXPR
			   ? integer_one_node : integer_zero_node));
	}
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
			      size_int (lbitsize - 1), 0);
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
	{
	  warning ("comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return convert (compare_type,
			  (code == NE_EXPR
			   ? integer_one_node : integer_zero_node));
	}
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = convert (type, integer_zero_node);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = fold (const_binop (BIT_AND_EXPR,
			   const_binop (LSHIFT_EXPR,
					convert (unsigned_type, rhs),
					size_int (lbitpos), 0),
			   mask, 0));

  return build (code, compare_type,
		build (BIT_AND_EXPR, unsigned_type, lhs, mask),
		rhs);
}
2679 \f
/* Subroutine for fold_truthop: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
			HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
			int *punsignedp, int *pvolatilep,
			tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (TREE_CODE (exp) == NOP_EXPR
      || TREE_CODE (exp) == CONVERT_EXPR
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  /* Peel off a BIT_AND_EXPR with a constant mask, remembering the mask.  */
  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
	return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
			       punsignedp, pvolatilep);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == tree_low_cst (TYPE_SIZE (outer_type), 1))
    *punsignedp = TREE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield: PBITSIZE low-order ones.  */
  unsigned_type = (*lang_hooks.types.type_for_size) (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_2 (~0, ~0);
  TREE_TYPE (mask) = unsigned_type;
  force_fit_type (mask, 0);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold (build (BIT_AND_EXPR, unsigned_type,
			convert (unsigned_type, and_mask), mask));

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
2771
2772 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
2773 bit positions. */
2774
2775 static int
2776 all_ones_mask_p (tree mask, int size)
2777 {
2778 tree type = TREE_TYPE (mask);
2779 unsigned int precision = TYPE_PRECISION (type);
2780 tree tmask;
2781
2782 tmask = build_int_2 (~0, ~0);
2783 TREE_TYPE (tmask) = (*lang_hooks.types.signed_type) (type);
2784 force_fit_type (tmask, 0);
2785 return
2786 tree_int_cst_equal (mask,
2787 const_binop (RSHIFT_EXPR,
2788 const_binop (LSHIFT_EXPR, tmask,
2789 size_int (precision - size),
2790 0),
2791 size_int (precision - size), 0));
2792 }
2793
/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, tree val)
{
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (val))
    return NULL_TREE;

  /* Build the sign bit (HI:LO) and a mask of the type's width (MASK_HI:
     MASK_LO) as a two-word value, handling precisions wider than one
     HOST_WIDE_INT.  */
  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign bit lives in the high word.  */
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = ((unsigned HOST_WIDE_INT) -1
		 >> (2 * HOST_BITS_PER_WIDE_INT - width));
      mask_lo = -1;
    }
  else
    {
      /* Sign bit lives in the low word.  */
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = ((unsigned HOST_WIDE_INT) -1
		 >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
2851
2852 /* Subroutine for fold_truthop: determine if an operand is simple enough
2853 to be evaluated unconditionally. */
2854
2855 static int
2856 simple_operand_p (tree exp)
2857 {
2858 /* Strip any conversions that don't change the machine mode. */
2859 while ((TREE_CODE (exp) == NOP_EXPR
2860 || TREE_CODE (exp) == CONVERT_EXPR)
2861 && (TYPE_MODE (TREE_TYPE (exp))
2862 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
2863 exp = TREE_OPERAND (exp, 0);
2864
2865 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
2866 || (DECL_P (exp)
2867 && ! TREE_ADDRESSABLE (exp)
2868 && ! TREE_THIS_VOLATILE (exp)
2869 && ! DECL_NONLOCAL (exp)
2870 /* Don't regard global variables as simple. They may be
2871 allocated in ways unknown to the compiler (shared memory,
2872 #pragma weak, etc). */
2873 && ! TREE_PUBLIC (exp)
2874 && ! DECL_EXTERNAL (exp)
2875 /* Loading a static variable is unduly expensive, but global
2876 registers aren't expensive. */
2877 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
2878 }
2879 \f
2880 /* The following functions are subroutines to fold_range_test and allow it to
2881 try to change a logical combination of comparisons into a range test.
2882
2883 For example, both
2884 X == 2 || X == 3 || X == 4 || X == 5
2885 and
2886 X >= 2 && X <= 5
2887 are converted to
2888 (unsigned) (X - 2) <= 3
2889
2890 We describe each set of comparisons as being either inside or outside
2891 a range, using a variable named like IN_P, and then describe the
2892 range with a lower and upper bound. If one of the bounds is omitted,
2893 it represents either the highest or lowest value of the type.
2894
2895 In the comments below, we represent a range by two numbers in brackets
2896 preceded by a "+" to designate being inside that range, or a "-" to
2897 designate being outside that range, so the condition can be inverted by
2898 flipping the prefix. An omitted bound is represented by a "-". For
2899 example, "- [-, 10]" means being outside the range starting at the lowest
2900 possible value and ending at 10, in other words, being greater than 10.
2901 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
2902 always false.
2903
2904 We set up things so that the missing bounds are handled in a consistent
2905 manner so neither a missing bound nor "true" and "false" need to be
2906 handled using a special case. */
2907
2908 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
2909 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
2910 and UPPER1_P are nonzero if the respective argument is an upper bound
2911 and zero for a lower. TYPE, if nonzero, is the type of the result; it
2912 must be specified for a comparison. ARG1 will be converted to ARG0's
2913 type if both are specified. */
2914
2915 static tree
2916 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
2917 tree arg1, int upper1_p)
2918 {
2919 tree tem;
2920 int result;
2921 int sgn0, sgn1;
2922
2923 /* If neither arg represents infinity, do the normal operation.
2924 Else, if not a comparison, return infinity. Else handle the special
2925 comparison rules. Note that most of the cases below won't occur, but
2926 are handled for consistency. */
2927
2928 if (arg0 != 0 && arg1 != 0)
2929 {
2930 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
2931 arg0, convert (TREE_TYPE (arg0), arg1)));
2932 STRIP_NOPS (tem);
2933 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
2934 }
2935
2936 if (TREE_CODE_CLASS (code) != '<')
2937 return 0;
2938
2939 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
2940 for neither. In real maths, we cannot assume open ended ranges are
2941 the same. But, this is computer arithmetic, where numbers are finite.
2942 We can therefore make the transformation of any unbounded range with
2943 the value Z, Z being greater than any representable number. This permits
2944 us to treat unbounded ranges as equal. */
2945 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
2946 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
2947 switch (code)
2948 {
2949 case EQ_EXPR:
2950 result = sgn0 == sgn1;
2951 break;
2952 case NE_EXPR:
2953 result = sgn0 != sgn1;
2954 break;
2955 case LT_EXPR:
2956 result = sgn0 < sgn1;
2957 break;
2958 case LE_EXPR:
2959 result = sgn0 <= sgn1;
2960 break;
2961 case GT_EXPR:
2962 result = sgn0 > sgn1;
2963 break;
2964 case GE_EXPR:
2965 result = sgn0 >= sgn1;
2966 break;
2967 default:
2968 abort ();
2969 }
2970
2971 return convert (type, result ? integer_one_node : integer_zero_node);
2972 }
2973 \f
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same type
   as the returned expression.  If EXP is not a comparison, we will most
   likely not be returning a useful value and range.

   On return, *PIN_P is nonzero if the returned expression must be "in"
   the range [*PLOW, *PHIGH] for EXP to be true, and zero if it must be
   outside that range.  A null *PLOW or *PHIGH means that side of the
   range is unbounded.  (The 0/1 flags passed to range_binop below select
   lower-bound vs. upper-bound treatment of a null operand -- see
   range_binop.)  */

static tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
{
  enum tree_code code;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
  tree orig_type = NULL_TREE;
  int in_p, n_in_p;
  tree low, high, n_low, n_high;

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0, low = high = convert (TREE_TYPE (exp), integer_zero_node);

  while (1)
    {
      code = TREE_CODE (exp);

      /* Pick up the operands of EXP, when it has any.  ARG0/ARG1 keep
	 their previous values when the current code has fewer operands.  */
      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  if (first_rtl_op (code) > 0)
	    arg0 = TREE_OPERAND (exp, 0);
	  if (TREE_CODE_CLASS (code) == '<'
	      || TREE_CODE_CLASS (code) == '1'
	      || TREE_CODE_CLASS (code) == '2')
	    type = TREE_TYPE (arg0);
	  if (TREE_CODE_CLASS (code) == '2'
	      || TREE_CODE_CLASS (code) == '<'
	      || (TREE_CODE_CLASS (code) == 'e'
		  && TREE_CODE_LENGTH (code) > 1))
	    arg1 = TREE_OPERAND (exp, 1);
	}

      /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
	 lose a cast by accident.  */
      if (type != NULL_TREE && orig_type == NULL_TREE)
	orig_type = type;

      switch (code)
	{
	case TRUTH_NOT_EXPR:
	  /* !X inverts the sense of the range and recurses into X.  */
	  in_p = ! in_p, exp = arg0;
	  continue;

	case EQ_EXPR: case NE_EXPR:
	case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
	  /* We can only do something if the range is testing for zero
	     and if the second operand is an integer constant.  Note that
	     saying something is "in" the range we make is done by
	     complementing IN_P since it will set in the initial case of
	     being not equal to zero; "out" is leaving it alone.  */
	  if (low == 0 || high == 0
	      || ! integer_zerop (low) || ! integer_zerop (high)
	      || TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  /* The +/- notation below means "in"/"out" of the bracketed
	     range; "-" in a bound position means unbounded.  */
	  switch (code)
	    {
	    case NE_EXPR:  /* - [c, c]  */
	      low = high = arg1;
	      break;
	    case EQ_EXPR:  /* + [c, c]  */
	      in_p = ! in_p, low = high = arg1;
	      break;
	    case GT_EXPR:  /* - [-, c] */
	      low = 0, high = arg1;
	      break;
	    case GE_EXPR:  /* + [c, -] */
	      in_p = ! in_p, low = arg1, high = 0;
	      break;
	    case LT_EXPR:  /* - [c, -] */
	      low = arg1, high = 0;
	      break;
	    case LE_EXPR:  /* + [-, c] */
	      in_p = ! in_p, low = 0, high = arg1;
	      break;
	    default:
	      abort ();
	    }

	  exp = arg0;

	  /* If this is an unsigned comparison, we also know that EXP is
	     greater than or equal to zero.  We base the range tests we make
	     on that fact, so we record it here so we can parse existing
	     range tests.  */
	  if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
	    {
	      /* Intersect the current range with [0, unbounded).  */
	      if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
				  1, convert (type, integer_zero_node),
				  NULL_TREE))
		break;

	      in_p = n_in_p, low = n_low, high = n_high;

	      /* If the high bound is missing, but we
		 have a low bound, reverse the range so
		 it goes from zero to the low bound minus 1.  */
	      if (high == 0 && low)
		{
		  in_p = ! in_p;
		  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
				      integer_one_node, 0);
		  low = convert (type, integer_zero_node);
		}
	    }
	  continue;

	case NEGATE_EXPR:
	  /* (-x) IN [a,b] -> x in [-b, -a]  */
	  n_low = range_binop (MINUS_EXPR, type,
			       convert (type, integer_zero_node), 0, high, 1);
	  n_high = range_binop (MINUS_EXPR, type,
				convert (type, integer_zero_node), 0, low, 0);
	  low = n_low, high = n_high;
	  exp = arg0;
	  continue;

	case BIT_NOT_EXPR:
	  /* ~ X -> -X - 1; rewrite EXP and let the PLUS/MINUS and
	     NEGATE cases above handle the result on later iterations.  */
	  exp = build (MINUS_EXPR, type, negate_expr (arg0),
		       convert (type, integer_one_node));
	  continue;

	case PLUS_EXPR:  case MINUS_EXPR:
	  if (TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  /* If EXP is signed, any overflow in the computation is undefined,
	     so we don't worry about it so long as our computations on
	     the bounds don't overflow.  For unsigned, overflow is defined
	     and this is exactly the right thing.  */
	  n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			       type, low, 0, arg1, 0);
	  n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
				type, high, 1, arg1, 0);
	  if ((n_low != 0 && TREE_OVERFLOW (n_low))
	      || (n_high != 0 && TREE_OVERFLOW (n_high)))
	    break;

	  /* Check for an unsigned range which has wrapped around the maximum
	     value thus making n_high < n_low, and normalize it.  */
	  if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
	    {
	      low = range_binop (PLUS_EXPR, type, n_high, 0,
				 integer_one_node, 0);
	      high = range_binop (MINUS_EXPR, type, n_low, 0,
				  integer_one_node, 0);

	      /* If the range is of the form +/- [ x+1, x ], we won't
		 be able to normalize it.  But then, it represents the
		 whole range or the empty set, so make it
		 +/- [ -, - ].  */
	      if (tree_int_cst_equal (n_low, low)
		  && tree_int_cst_equal (n_high, high))
		low = high = 0;
	      else
		in_p = ! in_p;
	    }
	  else
	    low = n_low, high = n_high;

	  exp = arg0;
	  continue;

	case NOP_EXPR:  case NON_LVALUE_EXPR:  case CONVERT_EXPR:
	  /* Refuse to look through a cast to a type wider than the
	     outermost type seen so far; otherwise the bounds would not
	     describe the original comparison.  */
	  if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
	    break;

	  /* The bounds must be representable in the inner type for the
	     conversion of the range to be meaningful.  */
	  if (! INTEGRAL_TYPE_P (type)
	      || (low != 0 && ! int_fits_type_p (low, type))
	      || (high != 0 && ! int_fits_type_p (high, type)))
	    break;

	  n_low = low, n_high = high;

	  if (n_low != 0)
	    n_low = convert (type, n_low);

	  if (n_high != 0)
	    n_high = convert (type, n_high);

	  /* If we're converting from an unsigned to a signed type,
	     we will be doing the comparison as unsigned.  The tests above
	     have already verified that LOW and HIGH are both positive.

	     So we have to make sure that the original unsigned value will
	     be interpreted as positive.  */
	  if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
	    {
	      tree equiv_type = (*lang_hooks.types.type_for_mode)
		(TYPE_MODE (type), 1);
	      tree high_positive;

	      /* A range without an upper bound is, naturally, unbounded.
		 Since convert would have cropped a very large value, use
		 the max value for the destination type.  */
	      high_positive
		= TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
		  : TYPE_MAX_VALUE (type);

	      /* When the precisions match, halving the max value yields
		 the largest value whose sign bit is clear, i.e. the
		 largest input still interpreted as positive.  */
	      if (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (exp)))
	        high_positive = fold (build (RSHIFT_EXPR, type,
					     convert (type, high_positive),
					     convert (type, integer_one_node)));

	      /* If the low bound is specified, "and" the range with the
		 range for which the original unsigned value will be
		 positive.  */
	      if (low != 0)
		{
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      1, n_low, n_high,
				      1, convert (type, integer_zero_node),
				      high_positive))
		    break;

		  in_p = (n_in_p == in_p);
		}
	      else
		{
		  /* Otherwise, "or" the range with the range of the input
		     that will be interpreted as negative.  */
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      0, n_low, n_high,
				      1, convert (type, integer_zero_node),
				      high_positive))
		    break;

		  in_p = (in_p != n_in_p);
		}
	    }

	  exp = arg0;
	  low = n_low, high = n_high;
	  continue;

	default:
	  break;
	}

      break;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
						 exp, 0, low, 0))
		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
						    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
3241 \f
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  A null LOW or HIGH means that side of the range is
   unbounded.  Return 0 if no suitable test can be built.  The cases below
   are ordered from cheapest test to most general; that order matters.  */

static tree
build_range_check (tree type, tree exp, int in_p, tree low, tree high)
{
  tree etype = TREE_TYPE (exp);
  tree value;

  /* For an "out of range" test, build the "in range" test and invert it
     when possible.  */
  if (! in_p
      && (0 != (value = build_range_check (type, exp, 1, low, high))))
    return invert_truthvalue (value);

  /* No bounds at all: the test is always true.  */
  if (low == 0 && high == 0)
    return convert (type, integer_one_node);

  /* One-sided ranges become a single comparison.  */
  if (low == 0)
    return fold (build (LE_EXPR, type, exp, high));

  if (high == 0)
    return fold (build (GE_EXPR, type, exp, low));

  /* A single-element range is an equality test.  */
  if (operand_equal_p (low, high, 0))
    return fold (build (EQ_EXPR, type, exp, low));

  /* [0, HIGH]: force the comparison to be done unsigned, so the single
     recursive LE_EXPR above covers both bounds at once.  */
  if (integer_zerop (low))
    {
      if (! TREE_UNSIGNED (etype))
	{
	  etype = (*lang_hooks.types.unsigned_type) (etype);
	  high = convert (etype, high);
	  exp = convert (etype, exp);
	}
      return build_range_check (type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      /* Build the two halves of the maximum signed value for ETYPE's
	 precision, i.e. 2**(prec-1) - 1, to compare against HIGH.  */
      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
	{
	  hi = 0;
	  lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
	}
      else
	{
	  hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
	  lo = (unsigned HOST_WIDE_INT) -1;
	}

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
	{
	  /* The range is exactly [1, signed-max], i.e. "positive as a
	     signed value", so test the sign instead.  */
	  if (TREE_UNSIGNED (etype))
	    {
	      etype = (*lang_hooks.types.signed_type) (etype);
	      exp = convert (etype, exp);
	    }
	  return fold (build (GT_EXPR, type, exp,
			      convert (etype, integer_zero_node)));
	}
    }

  /* General case: shift the range down to start at zero and recurse on
     EXP - LOW, which lands in the [0, HIGH - LOW] case above.
     NOTE(review): EXP - LOW is built in ETYPE, which may be signed here;
     presumably the recursive [0, N] case's switch to unsigned keeps this
     safe, but confirm there is no signed-overflow hazard.  */
  if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
      && ! TREE_OVERFLOW (value))
    return build_range_check (type,
			      fold (build (MINUS_EXPR, etype, exp, low)),
			      1, convert (etype, integer_zero_node), value);

  return 0;
}
3318 \f
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.

   Each input range is an (in/out flag, low, high) triple as produced by
   make_range; a null bound means unbounded on that side.  The result
   describes the conjunction of the two membership tests as a single
   range test, written to *PIN_P, *PLOW and *PHIGH.  */

static int
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
	      tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  /* Whether the corresponding bounds are equal; two null (unbounded)
     bounds count as equal.  The 0/1 flags tell range_binop to treat a
     null operand as a lower or upper bound respectively.  */
  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
				 low0, 0, low1, 0))
      || (lowequal
	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the first range to the end of the second.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)
	{
	  in_p = 1, high = high0;
	  low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
			     integer_one_node, 0);
	}
      else if (! subset || highequal)
	{
	  in_p = 1, low = low0;
	  high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
			      integer_one_node, 0);
	}
      else
	return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
      if (no_overlap)
	in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
	in_p = 0, low = high = 0;
      else
	{
	  in_p = 1, high = high1;
	  low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
			     integer_one_node, 0);
	}
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of the
	 second.  */
      if (no_overlap)
	{
	  /* Adjacency test: high0 + 1 == low1.  */
	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_binop (PLUS_EXPR, NULL_TREE,
						      high0, 1,
						      integer_one_node, 1),
					 1, low1, 0)))
	    in_p = 0, low = low0, high = high1;
	  else
	    return 0;
	}
      else if (subset)
	in_p = 0, low = low0, high = high0;
      else
	in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
3451 \f
/* Nonzero if a range test that cannot be expressed as a single range
   check should still be rewritten without short-circuiting (using
   TRUTH_AND_EXPR/TRUTH_OR_EXPR); worthwhile when branches are costly.
   Targets may predefine this; by default it keys off BRANCH_COST.  */
#ifndef RANGE_TEST_NON_SHORT_CIRCUIT
#define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so, 0 if no
   transformation applies.  */

static tree
fold_range_test (tree exp)
{
  int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
	       || TREE_CODE (exp) == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  /* Decompose each operand into (expression, in/out flag, bounds).  */
  tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
  tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
  tree tem;

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  (De Morgan: merge_ranges computes the AND of
     the two membership tests.)  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (TREE_TYPE (exp),
					 lhs != 0 ? lhs
					 : rhs != 0 ? rhs : integer_zero_node,
					 in_p, low, high))))
    return or_op ? invert_truthvalue (tem) : tem;

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (RANGE_TEST_NON_SHORT_CIRCUIT
	   && lhs != 0 && rhs != 0
	   && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
	       || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
	   && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
		      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
		      TREE_TYPE (exp), TREE_OPERAND (exp, 0),
		      TREE_OPERAND (exp, 1));

      else if ((*lang_hooks.decls.global_bindings_p) () == 0
	       && ! CONTAINS_PLACEHOLDER_P (lhs))
	{
	  /* Evaluate the common subexpression once via a SAVE_EXPR and
	     rebuild both range checks over that shared value.  */
	  tree common = save_expr (lhs);

	  if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
					     or_op ? ! in0_p : in0_p,
					     low0, high0))
	      && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
						 or_op ? ! in1_p : in1_p,
						 low1, high1))))
	    return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
			  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			  TREE_TYPE (exp), lhs, rhs);
	}
    }

  return 0;
}
3525 \f
3526 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3527 bit value. Arrange things so the extra bits will be set to zero if and
3528 only if C is signed-extended to its full width. If MASK is nonzero,
3529 it is an INTEGER_CST that should be AND'ed with the extra bits. */
3530
3531 static tree
3532 unextend (tree c, int p, int unsignedp, tree mask)
3533 {
3534 tree type = TREE_TYPE (c);
3535 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3536 tree temp;
3537
3538 if (p == modesize || unsignedp)
3539 return c;
3540
3541 /* We work by getting just the sign bit into the low-order bit, then
3542 into the high-order bit, then sign-extend. We then XOR that value
3543 with C. */
3544 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3545 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3546
3547 /* We must use a signed type in order to get an arithmetic right shift.
3548 However, we must also avoid introducing accidental overflows, so that
3549 a subsequent call to integer_zerop will work. Hence we must
3550 do the type conversion here. At this point, the constant is either
3551 zero or one, and the conversion to a signed type can never overflow.
3552 We could get an overflow if this conversion is done anywhere else. */
3553 if (TREE_UNSIGNED (type))
3554 temp = convert ((*lang_hooks.types.signed_type) (type), temp);
3555
3556 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3557 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3558 if (mask != 0)
3559 temp = const_binop (BIT_AND_EXPR, temp, convert (TREE_TYPE (c), mask), 0);
3560 /* If necessary, convert the type back to match the type of C. */
3561 if (TREE_UNSIGNED (type))
3562 temp = convert (type, temp);
3563
3564 return convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
3565 }
3566 \f
3567 /* Find ways of folding logical expressions of LHS and RHS:
3568 Try to merge two comparisons to the same innermost item.
3569 Look for range tests like "ch >= '0' && ch <= '9'".
3570 Look for combinations of simple terms on machines with expensive branches
3571 and evaluate the RHS unconditionally.
3572
3573 For example, if we have p->a == 2 && p->b == 4 and we can make an
3574 object large enough to span both A and B, we can do this with a comparison
3575 against the object ANDed with the a mask.
3576
3577 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3578 operations to do this with one comparison.
3579
3580 We check for both normal comparisons and the BIT_AND_EXPRs made this by
3581 function and the one above.
3582
3583 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3584 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3585
3586 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
3587 two operands.
3588
3589 We return the simplified tree or 0 if no optimization is possible. */
3590
3591 static tree
3592 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
3593 {
3594 /* If this is the "or" of two comparisons, we can do something if
3595 the comparisons are NE_EXPR. If this is the "and", we can do something
3596 if the comparisons are EQ_EXPR. I.e.,
3597 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3598
3599 WANTED_CODE is this operation code. For single bit fields, we can
3600 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3601 comparison for one-bit fields. */
3602
3603 enum tree_code wanted_code;
3604 enum tree_code lcode, rcode;
3605 tree ll_arg, lr_arg, rl_arg, rr_arg;
3606 tree ll_inner, lr_inner, rl_inner, rr_inner;
3607 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3608 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3609 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3610 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3611 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3612 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3613 enum machine_mode lnmode, rnmode;
3614 tree ll_mask, lr_mask, rl_mask, rr_mask;
3615 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3616 tree l_const, r_const;
3617 tree lntype, rntype, result;
3618 int first_bit, end_bit;
3619 int volatilep;
3620
3621 /* Start by getting the comparison codes. Fail if anything is volatile.
3622 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3623 it were surrounded with a NE_EXPR. */
3624
3625 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3626 return 0;
3627
3628 lcode = TREE_CODE (lhs);
3629 rcode = TREE_CODE (rhs);
3630
3631 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3632 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3633
3634 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3635 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3636
3637 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3638 return 0;
3639
3640 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3641 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3642
3643 ll_arg = TREE_OPERAND (lhs, 0);
3644 lr_arg = TREE_OPERAND (lhs, 1);
3645 rl_arg = TREE_OPERAND (rhs, 0);
3646 rr_arg = TREE_OPERAND (rhs, 1);
3647
3648 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
3649 if (simple_operand_p (ll_arg)
3650 && simple_operand_p (lr_arg)
3651 && !FLOAT_TYPE_P (TREE_TYPE (ll_arg)))
3652 {
3653 int compcode;
3654
3655 if (operand_equal_p (ll_arg, rl_arg, 0)
3656 && operand_equal_p (lr_arg, rr_arg, 0))
3657 {
3658 int lcompcode, rcompcode;
3659
3660 lcompcode = comparison_to_compcode (lcode);
3661 rcompcode = comparison_to_compcode (rcode);
3662 compcode = (code == TRUTH_AND_EXPR)
3663 ? lcompcode & rcompcode
3664 : lcompcode | rcompcode;
3665 }
3666 else if (operand_equal_p (ll_arg, rr_arg, 0)
3667 && operand_equal_p (lr_arg, rl_arg, 0))
3668 {
3669 int lcompcode, rcompcode;
3670
3671 rcode = swap_tree_comparison (rcode);
3672 lcompcode = comparison_to_compcode (lcode);
3673 rcompcode = comparison_to_compcode (rcode);
3674 compcode = (code == TRUTH_AND_EXPR)
3675 ? lcompcode & rcompcode
3676 : lcompcode | rcompcode;
3677 }
3678 else
3679 compcode = -1;
3680
3681 if (compcode == COMPCODE_TRUE)
3682 return convert (truth_type, integer_one_node);
3683 else if (compcode == COMPCODE_FALSE)
3684 return convert (truth_type, integer_zero_node);
3685 else if (compcode != -1)
3686 return build (compcode_to_comparison (compcode),
3687 truth_type, ll_arg, lr_arg);
3688 }
3689
3690 /* If the RHS can be evaluated unconditionally and its operands are
3691 simple, it wins to evaluate the RHS unconditionally on machines
3692 with expensive branches. In this case, this isn't a comparison
3693 that can be merged. Avoid doing this if the RHS is a floating-point
3694 comparison since those can trap. */
3695
3696 if (BRANCH_COST >= 2
3697 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
3698 && simple_operand_p (rl_arg)
3699 && simple_operand_p (rr_arg))
3700 {
3701 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
3702 if (code == TRUTH_OR_EXPR
3703 && lcode == NE_EXPR && integer_zerop (lr_arg)
3704 && rcode == NE_EXPR && integer_zerop (rr_arg)
3705 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3706 return build (NE_EXPR, truth_type,
3707 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3708 ll_arg, rl_arg),
3709 integer_zero_node);
3710
3711 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
3712 if (code == TRUTH_AND_EXPR
3713 && lcode == EQ_EXPR && integer_zerop (lr_arg)
3714 && rcode == EQ_EXPR && integer_zerop (rr_arg)
3715 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3716 return build (EQ_EXPR, truth_type,
3717 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3718 ll_arg, rl_arg),
3719 integer_zero_node);
3720
3721 return build (code, truth_type, lhs, rhs);
3722 }
3723
3724 /* See if the comparisons can be merged. Then get all the parameters for
3725 each side. */
3726
3727 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
3728 || (rcode != EQ_EXPR && rcode != NE_EXPR))
3729 return 0;
3730
3731 volatilep = 0;
3732 ll_inner = decode_field_reference (ll_arg,
3733 &ll_bitsize, &ll_bitpos, &ll_mode,
3734 &ll_unsignedp, &volatilep, &ll_mask,
3735 &ll_and_mask);
3736 lr_inner = decode_field_reference (lr_arg,
3737 &lr_bitsize, &lr_bitpos, &lr_mode,
3738 &lr_unsignedp, &volatilep, &lr_mask,
3739 &lr_and_mask);
3740 rl_inner = decode_field_reference (rl_arg,
3741 &rl_bitsize, &rl_bitpos, &rl_mode,
3742 &rl_unsignedp, &volatilep, &rl_mask,
3743 &rl_and_mask);
3744 rr_inner = decode_field_reference (rr_arg,
3745 &rr_bitsize, &rr_bitpos, &rr_mode,
3746 &rr_unsignedp, &volatilep, &rr_mask,
3747 &rr_and_mask);
3748
3749 /* It must be true that the inner operation on the lhs of each
3750 comparison must be the same if we are to be able to do anything.
3751 Then see if we have constants. If not, the same must be true for
3752 the rhs's. */
3753 if (volatilep || ll_inner == 0 || rl_inner == 0
3754 || ! operand_equal_p (ll_inner, rl_inner, 0))
3755 return 0;
3756
3757 if (TREE_CODE (lr_arg) == INTEGER_CST
3758 && TREE_CODE (rr_arg) == INTEGER_CST)
3759 l_const = lr_arg, r_const = rr_arg;
3760 else if (lr_inner == 0 || rr_inner == 0
3761 || ! operand_equal_p (lr_inner, rr_inner, 0))
3762 return 0;
3763 else
3764 l_const = r_const = 0;
3765
3766 /* If either comparison code is not correct for our logical operation,
3767 fail. However, we can convert a one-bit comparison against zero into
3768 the opposite comparison against that bit being set in the field. */
3769
3770 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
3771 if (lcode != wanted_code)
3772 {
3773 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
3774 {
3775 /* Make the left operand unsigned, since we are only interested
3776 in the value of one bit. Otherwise we are doing the wrong
3777 thing below. */
3778 ll_unsignedp = 1;
3779 l_const = ll_mask;
3780 }
3781 else
3782 return 0;
3783 }
3784
3785 /* This is analogous to the code for l_const above. */
3786 if (rcode != wanted_code)
3787 {
3788 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
3789 {
3790 rl_unsignedp = 1;
3791 r_const = rl_mask;
3792 }
3793 else
3794 return 0;
3795 }
3796
3797 /* After this point all optimizations will generate bit-field
3798 references, which we might not want. */
3799 if (! (*lang_hooks.can_use_bit_fields_p) ())
3800 return 0;
3801
3802 /* See if we can find a mode that contains both fields being compared on
3803 the left. If we can't, fail. Otherwise, update all constants and masks
3804 to be relative to a field of that size. */
3805 first_bit = MIN (ll_bitpos, rl_bitpos);
3806 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
3807 lnmode = get_best_mode (end_bit - first_bit, first_bit,
3808 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
3809 volatilep);
3810 if (lnmode == VOIDmode)
3811 return 0;
3812
3813 lnbitsize = GET_MODE_BITSIZE (lnmode);
3814 lnbitpos = first_bit & ~ (lnbitsize - 1);
3815 lntype = (*lang_hooks.types.type_for_size) (lnbitsize, 1);
3816 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
3817
3818 if (BYTES_BIG_ENDIAN)
3819 {
3820 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
3821 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
3822 }
3823
3824 ll_mask = const_binop (LSHIFT_EXPR, convert (lntype, ll_mask),
3825 size_int (xll_bitpos), 0);
3826 rl_mask = const_binop (LSHIFT_EXPR, convert (lntype, rl_mask),
3827 size_int (xrl_bitpos), 0);
3828
3829 if (l_const)
3830 {
3831 l_const = convert (lntype, l_const);
3832 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
3833 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
3834 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
3835 fold (build1 (BIT_NOT_EXPR,
3836 lntype, ll_mask)),
3837 0)))
3838 {
3839 warning ("comparison is always %d", wanted_code == NE_EXPR);
3840
3841 return convert (truth_type,
3842 wanted_code == NE_EXPR
3843 ? integer_one_node : integer_zero_node);
3844 }
3845 }
3846 if (r_const)
3847 {
3848 r_const = convert (lntype, r_const);
3849 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
3850 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
3851 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
3852 fold (build1 (BIT_NOT_EXPR,
3853 lntype, rl_mask)),
3854 0)))
3855 {
3856 warning ("comparison is always %d", wanted_code == NE_EXPR);
3857
3858 return convert (truth_type,
3859 wanted_code == NE_EXPR
3860 ? integer_one_node : integer_zero_node);
3861 }
3862 }
3863
3864 /* If the right sides are not constant, do the same for it. Also,
3865 disallow this optimization if a size or signedness mismatch occurs
3866 between the left and right sides. */
3867 if (l_const == 0)
3868 {
3869 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
3870 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
3871 /* Make sure the two fields on the right
3872 correspond to the left without being swapped. */
3873 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
3874 return 0;
3875
3876 first_bit = MIN (lr_bitpos, rr_bitpos);
3877 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
3878 rnmode = get_best_mode (end_bit - first_bit, first_bit,
3879 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
3880 volatilep);
3881 if (rnmode == VOIDmode)
3882 return 0;
3883
3884 rnbitsize = GET_MODE_BITSIZE (rnmode);
3885 rnbitpos = first_bit & ~ (rnbitsize - 1);
3886 rntype = (*lang_hooks.types.type_for_size) (rnbitsize, 1);
3887 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
3888
3889 if (BYTES_BIG_ENDIAN)
3890 {
3891 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
3892 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
3893 }
3894
3895 lr_mask = const_binop (LSHIFT_EXPR, convert (rntype, lr_mask),
3896 size_int (xlr_bitpos), 0);
3897 rr_mask = const_binop (LSHIFT_EXPR, convert (rntype, rr_mask),
3898 size_int (xrr_bitpos), 0);
3899
3900 /* Make a mask that corresponds to both fields being compared.
3901 Do this for both items being compared. If the operands are the
3902 same size and the bits being compared are in the same position
3903 then we can do this by masking both and comparing the masked
3904 results. */
3905 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3906 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
3907 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
3908 {
3909 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
3910 ll_unsignedp || rl_unsignedp);
3911 if (! all_ones_mask_p (ll_mask, lnbitsize))
3912 lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
3913
3914 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
3915 lr_unsignedp || rr_unsignedp);
3916 if (! all_ones_mask_p (lr_mask, rnbitsize))
3917 rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
3918
3919 return build (wanted_code, truth_type, lhs, rhs);
3920 }
3921
3922 /* There is still another way we can do something: If both pairs of
3923 fields being compared are adjacent, we may be able to make a wider
3924 field containing them both.
3925
3926 Note that we still must mask the lhs/rhs expressions. Furthermore,
3927 the mask must be shifted to account for the shift done by
3928 make_bit_field_ref. */
3929 if ((ll_bitsize + ll_bitpos == rl_bitpos
3930 && lr_bitsize + lr_bitpos == rr_bitpos)
3931 || (ll_bitpos == rl_bitpos + rl_bitsize
3932 && lr_bitpos == rr_bitpos + rr_bitsize))
3933 {
3934 tree type;
3935
3936 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
3937 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
3938 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
3939 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
3940
3941 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
3942 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
3943 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
3944 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
3945
3946 /* Convert to the smaller type before masking out unwanted bits. */
3947 type = lntype;
3948 if (lntype != rntype)
3949 {
3950 if (lnbitsize > rnbitsize)
3951 {
3952 lhs = convert (rntype, lhs);
3953 ll_mask = convert (rntype, ll_mask);
3954 type = rntype;
3955 }
3956 else if (lnbitsize < rnbitsize)
3957 {
3958 rhs = convert (lntype, rhs);
3959 lr_mask = convert (lntype, lr_mask);
3960 type = lntype;
3961 }
3962 }
3963
3964 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
3965 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
3966
3967 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
3968 rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
3969
3970 return build (wanted_code, truth_type, lhs, rhs);
3971 }
3972
3973 return 0;
3974 }
3975
3976 /* Handle the case of comparisons with constants. If there is something in
3977 common between the masks, those bits of the constants must be the same.
3978 If not, the condition is always false. Test for this to avoid generating
3979 incorrect code below. */
3980 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
3981 if (! integer_zerop (result)
3982 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
3983 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
3984 {
3985 if (wanted_code == NE_EXPR)
3986 {
3987 warning ("`or' of unmatched not-equal tests is always 1");
3988 return convert (truth_type, integer_one_node);
3989 }
3990 else
3991 {
3992 warning ("`and' of mutually exclusive equal-tests is always 0");
3993 return convert (truth_type, integer_zero_node);
3994 }
3995 }
3996
3997 /* Construct the expression we will return. First get the component
3998 reference we will make. Unless the mask is all ones the width of
3999 that field, perform the mask operation. Then compare with the
4000 merged constant. */
4001 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4002 ll_unsignedp || rl_unsignedp);
4003
4004 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4005 if (! all_ones_mask_p (ll_mask, lnbitsize))
4006 result = build (BIT_AND_EXPR, lntype, result, ll_mask);
4007
4008 return build (wanted_code, truth_type, result,
4009 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4010 }
4011 \f
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (tree t)
{
  tree type = TREE_TYPE (t);
  tree arg0 = TREE_OPERAND (t, 0);
  enum tree_code op_code;
  tree comp_const = TREE_OPERAND (t, 1);
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  /* Look through conversions that do not change the sign.  */
  STRIP_SIGN_NOPS (arg0);

  /* ARG0 is expected to be MIN_EXPR/MAX_EXPR (INNER, MINMAX_CONST);
     record how MINMAX_CONST orders against the comparison constant.  */
  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (minmax_const))
    return t;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (TREE_CODE (t))
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      /* !=, < and <= are the negations of ==, >= and >; invert the
	 comparison, simplify that, and invert the result back.  */
      return
	invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));

    case GE_EXPR:
      /* X >= C is (X == C) || (X > C).  */
      return
	fold (build (TRUTH_ORIF_EXPR, type,
		     optimize_minmax_comparison
		     (build (EQ_EXPR, type, arg0, comp_const)),
		     optimize_minmax_comparison
		     (build (GT_EXPR, type, arg0, comp_const))));

    case EQ_EXPR:
      /* The examples below use MINMAX_CONST == 0 and COMP_CONST values
	 of 0, 5 and -1 to illustrate the three possible orderings.  */
      if (op_code == MAX_EXPR && consts_equal)
	/* MAX (X, 0) == 0  ->  X <= 0  */
	return fold (build (LE_EXPR, type, inner, comp_const));

      else if (op_code == MAX_EXPR && consts_lt)
	/* MAX (X, 0) == 5  ->  X == 5   */
	return fold (build (EQ_EXPR, type, inner, comp_const));

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) == -1  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else if (consts_equal)
	/* MIN (X, 0) == 0  ->  X >= 0  */
	return fold (build (GE_EXPR, type, inner, comp_const));

      else if (consts_lt)
	/* MIN (X, 0) == 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) == -1  ->  X == -1  */
	return fold (build (EQ_EXPR, type, inner, comp_const));

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
	/* MAX (X, 0) > 0  ->  X > 0
	   MAX (X, 0) > 5  ->  X > 5  */
	return fold (build (GT_EXPR, type, inner, comp_const));

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) > -1  ->  true  */
	return omit_one_operand (type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
	/* MIN (X, 0) > 0  ->  false
	   MIN (X, 0) > 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) > -1  ->  X > -1  */
	return fold (build (GT_EXPR, type, inner, comp_const));

    default:
      return t;
    }
}
4107 \f
4108 /* T is an integer expression that is being multiplied, divided, or taken a
4109 modulus (CODE says which and what kind of divide or modulus) by a
4110 constant C. See if we can eliminate that operation by folding it with
4111 other operations already in T. WIDE_TYPE, if non-null, is a type that
4112 should be used for the computation if wider than our type.
4113
4114 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4115 (X * 2) + (Y * 4). We must, however, be assured that either the original
4116 expression would not overflow or that overflow is undefined for the type
4117 in the language in question.
4118
4119 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4120 the machine has a multiply-accumulate insn or that this is part of an
4121 addressing calculation.
4122
4123 If we return a non-null expression, it is an equivalent form of the
4124 original computation, but need not be in the original type. */
4125
4126 static tree
4127 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
4128 {
4129 /* To avoid exponential search depth, refuse to allow recursion past
4130 three levels. Beyond that (1) it's highly unlikely that we'll find
4131 something interesting and (2) we've probably processed it before
4132 when we built the inner expression. */
4133
4134 static int depth;
4135 tree ret;
4136
4137 if (depth > 3)
4138 return NULL;
4139
4140 depth++;
4141 ret = extract_muldiv_1 (t, c, code, wide_type);
4142 depth--;
4143
4144 return ret;
4145 }
4146
/* Worker for extract_muldiv.  Apart from the recursion-depth limit
   enforced by that wrapper, the contract is the same: try to fold the
   operation CODE by constant C into T, computing in WIDE_TYPE when it
   is wider than T's type; return the simplified tree or 0.  */

static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  /* Compute in WIDE_TYPE only when it is strictly wider than T's type.  */
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
				   > GET_MODE_SIZE (TYPE_MODE (type)))
		? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  /* Pick up the operands for unary ('1') and binary ('2') expressions.  */
  if (TREE_CODE_CLASS (tcode) == '1')
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == '2')
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
	 or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
	return const_binop (code, convert (ctype, t), convert (ctype, c), 0);
      break;

    case CONVERT_EXPR:  case NON_LVALUE_EXPR:  case NOP_EXPR:
      /* If op0 is an expression ... */
      if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
	   || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
	   || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
	   || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
	  /* ... and is unsigned, and its type is smaller than ctype,
	     then we cannot pass through as widening.  */
	  && ((TREE_UNSIGNED (TREE_TYPE (op0))
	       && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
		     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
	       && (GET_MODE_SIZE (TYPE_MODE (ctype))
		   > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
	      /* ... or its type is larger than ctype,
		 then we cannot pass through this truncation.  */
	      || (GET_MODE_SIZE (TYPE_MODE (ctype))
		  < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
	      /* ... or signedness changes for division or modulus,
		 then we cannot pass through this conversion.  */
	      || (code != MULT_EXPR
		  && (TREE_UNSIGNED (ctype)
		      != TREE_UNSIGNED (TREE_TYPE (op0))))))
	break;

      /* Pass the constant down and see if we can make a simplification.  If
	 we can, replace this expression with the inner simplification for
	 possible later conversion to our or some other type.  */
      if ((t2 = convert (TREE_TYPE (op0), c)) != 0
	  && TREE_CODE (t2) == INTEGER_CST
	  && ! TREE_CONSTANT_OVERFLOW (t2)
	  && (0 != (t1 = extract_muldiv (op0, t2, code,
					 code == MULT_EXPR
					 ? ctype : NULL_TREE))))
	return t1;
      break;

    case NEGATE_EXPR:  case ABS_EXPR:
      /* Distribute the operation into the single operand.  */
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
	return fold (build1 (tcode, ctype, convert (ctype, t1)));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
	 this optimization as that changes the result.  */
      if (TREE_UNSIGNED (ctype) != TREE_UNSIGNED (type))
	break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
	  && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
	{
	  /* Applying a negative constant reverses the ordering, so
	     MIN becomes MAX and vice versa.  */
	  if (tree_int_cst_sgn (c) < 0)
	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);

	  return fold (build (tcode, ctype, convert (ctype, t1),
			      convert (ctype, t2)));
	}
      break;

    case WITH_RECORD_EXPR:
      /* Simplify the value operand and keep the record context.  */
      if ((t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code, wide_type)) != 0)
	return build (WITH_RECORD_EXPR, TREE_TYPE (t1), t1,
		      TREE_OPERAND (t, 1));
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
	 or floor division, by a power of two, so we can treat it that
	 way unless the multiplier or divisor overflows.  */
      if (TREE_CODE (op1) == INTEGER_CST
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
	  && TREE_INT_CST_HIGH (op1) == 0
	  && 0 != (t1 = convert (ctype,
				 const_binop (LSHIFT_EXPR, size_one_node,
					      op1, 0)))
	  && ! TREE_OVERFLOW (t1))
	return extract_muldiv (build (tcode == LSHIFT_EXPR
				      ? MULT_EXPR : FLOOR_DIV_EXPR,
				      ctype, convert (ctype, op0), t1),
			       c, code, wide_type);
      break;

    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
      t1 = extract_muldiv (op0, c, code, wide_type);
      t2 = extract_muldiv (op1, c, code, wide_type);
      if (t1 != 0 && t2 != 0
	  && (code == MULT_EXPR
	      /* If not multiplication, we can only do this if both operands
		 are divisible by c.  */
	      || (multiple_of_p (ctype, op0, c)
		  && multiple_of_p (ctype, op1, c))))
	return fold (build (tcode, ctype, convert (ctype, t1),
			    convert (ctype, t2)));

      /* If this was a subtraction, negate OP1 and set it to be an addition.
	 This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
	tcode = PLUS_EXPR, op1 = negate_expr (op1);

      if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If either OP1 or C are negative, this optimization is not safe for
	 some of the division and remainder types while for others we need
	 to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code != MULT_EXPR
		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
	    break;
	}

      /* If it's a multiply or a division/modulus operation of a multiple
	 of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	{
	  op1 = const_binop (code, convert (ctype, op1), convert (ctype, c), 0);
	  if (op1 == 0 || TREE_OVERFLOW (op1))
	    break;
	}
      else
	break;

      /* If we have an unsigned type that is not a sizetype, we cannot widen
	 the operation since it will change the result if the original
	 computation overflowed.  */
      if (TREE_UNSIGNED (ctype)
	  && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
	  && ctype != type)
	break;

      /* If we were able to eliminate our operation from the first side,
	 apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
	return fold (build (tcode, ctype, convert (ctype, t1), op1));

      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow.  */
      if (code == MULT_EXPR)
	return fold (build (tcode, ctype, fold (build (code, ctype,
						       convert (ctype, op0),
						       convert (ctype, c))),
			    op1));

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	return omit_one_operand (type, integer_zero_node, op0);

      /* ... fall through ... */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
      if (same_p
	  && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
	return fold (build (tcode, ctype, convert (ctype, t1),
			    convert (ctype, op1)));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
	       && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
	return fold (build (tcode, ctype, convert (ctype, op0),
			    convert (ctype, t1)));
      else if (TREE_CODE (op1) != INTEGER_CST)
	return 0;

      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
      if (tcode == code
	  && 0 != (t1 = const_binop (MULT_EXPR, convert (ctype, op1),
				     convert (ctype, c), 0))
	  && ! TREE_OVERFLOW (t1))
	return fold (build (tcode, ctype, convert (ctype, op0), t1));

      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with either an
	 operation or CODE or TCODE.

	 If we have an unsigned type that is not a sizetype, we cannot do
	 this since it will change the result if the original computation
	 overflowed.  */
      if ((! TREE_UNSIGNED (ctype)
	   || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
	  && ! flag_wrapv
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
	      || (tcode == MULT_EXPR
		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
	{
	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	    return fold (build (tcode, ctype, convert (ctype, op0),
				convert (ctype,
					 const_binop (TRUNC_DIV_EXPR,
						      op1, c, 0))));
	  else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
	    return fold (build (code, ctype, convert (ctype, op0),
				convert (ctype,
					 const_binop (TRUNC_DIV_EXPR,
						      c, op1, 0))));
	}
      break;

    default:
      break;
    }

  return 0;
}
4409 \f
4410 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4411 S, a SAVE_EXPR, return the expression actually being evaluated. Note
4412 that we may sometimes modify the tree. */
4413
4414 static tree
4415 strip_compound_expr (tree t, tree s)
4416 {
4417 enum tree_code code = TREE_CODE (t);
4418
4419 /* See if this is the COMPOUND_EXPR we want to eliminate. */
4420 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4421 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4422 return TREE_OPERAND (t, 1);
4423
4424 /* See if this is a COND_EXPR or a simple arithmetic operator. We
4425 don't bother handling any other types. */
4426 else if (code == COND_EXPR)
4427 {
4428 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4429 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4430 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
4431 }
4432 else if (TREE_CODE_CLASS (code) == '1')
4433 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4434 else if (TREE_CODE_CLASS (code) == '<'
4435 || TREE_CODE_CLASS (code) == '2')
4436 {
4437 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4438 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4439 }
4440
4441 return t;
4442 }
4443 \f
4444 /* Return a node which has the indicated constant VALUE (either 0 or
4445 1), and is of the indicated TYPE. */
4446
4447 static tree
4448 constant_boolean_node (int value, tree type)
4449 {
4450 if (type == integer_type_node)
4451 return value ? integer_one_node : integer_zero_node;
4452 else if (TREE_CODE (type) == BOOLEAN_TYPE)
4453 return (*lang_hooks.truthvalue_conversion) (value ? integer_one_node :
4454 integer_zero_node);
4455 else
4456 {
4457 tree t = build_int_2 (value, 0);
4458
4459 TREE_TYPE (t) = type;
4460 return t;
4461 }
4462 }
4463
4464 /* Utility function for the following routine, to see how complex a nesting of
4465 COND_EXPRs can be. EXPR is the expression and LIMIT is a count beyond which
4466 we don't care (to avoid spending too much time on complex expressions.). */
4467
4468 static int
4469 count_cond (tree expr, int lim)
4470 {
4471 int ctrue, cfalse;
4472
4473 if (TREE_CODE (expr) != COND_EXPR)
4474 return 0;
4475 else if (lim <= 0)
4476 return 0;
4477
4478 ctrue = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4479 cfalse = count_cond (TREE_OPERAND (expr, 2), lim - 1 - ctrue);
4480 return MIN (lim, 1 + ctrue + cfalse);
4481 }
4482
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  */

static tree
fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
				     tree cond, tree arg, int cond_first_p)
{
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;
  /* In the end, we'll produce a COND_EXPR.  Both arms of the
     conditional expression will be binary operations.  The left-hand
     side of the expression to be executed if the condition is true
     will be pointed to by TRUE_LHS.  Similarly, the right-hand side
     of the expression to be executed if the condition is true will be
     pointed to by TRUE_RHS.  FALSE_LHS and FALSE_RHS are analogous --
     but apply to the expression to be executed if the conditional is
     false.  */
  tree *true_lhs;
  tree *true_rhs;
  tree *false_lhs;
  tree *false_rhs;
  /* These are the codes to use for the left-hand side and right-hand
     side of the COND_EXPR.  Normally, they are the same as CODE.  */
  enum tree_code lhs_code = code;
  enum tree_code rhs_code = code;
  /* And these are the types of the expressions.  */
  tree lhs_type = type;
  tree rhs_type = type;
  int save = 0;

  /* Point the operand slots at ARG or at the arm values, depending on
     whether COND is CODE's first or second operand.  */
  if (cond_first_p)
    {
      true_rhs = false_rhs = &arg;
      true_lhs = &true_value;
      false_lhs = &false_value;
    }
  else
    {
      true_lhs = false_lhs = &arg;
      true_rhs = &true_value;
      false_rhs = &false_value;
    }

  if (TREE_CODE (cond) == COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an expression, then it does not make
	 sense to try to perform a logical or arithmetic operation
	 involving it.  Instead of building `a + throw 3' for example,
	 we simply build `a, throw 3'.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	{
	  if (! cond_first_p)
	    {
	      lhs_code = COMPOUND_EXPR;
	      lhs_type = void_type_node;
	    }
	  else
	    lhs = true_value;
	}
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	{
	  if (! cond_first_p)
	    {
	      rhs_code = COMPOUND_EXPR;
	      rhs_type = void_type_node;
	    }
	  else
	    rhs = false_value;
	}
    }
  else
    {
      /* COND is a truth value (e.g. a comparison); treat it as
	 COND ? 1 : 0 in COND's own type.  */
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = convert (testtype, integer_one_node);
      false_value = convert (testtype, integer_zero_node);
    }

  /* If ARG is complex we want to make sure we only evaluate it once.  Though
     this is only required if it is volatile, it might be more efficient even
     if it is not.  However, if we succeed in folding one part to a constant,
     we do not need to make this SAVE_EXPR.  Since we do this optimization
     primarily to see if we do end up with constant and this SAVE_EXPR
     interferes with later optimizations, suppressing it when we can is
     important.

     If we are not in a function, we can't make a SAVE_EXPR, so don't try to
     do so.  Don't try to see if the result is a constant if an arm is a
     COND_EXPR since we get exponential behavior in that case.  */

  if (saved_expr_p (arg))
    save = 1;
  else if (lhs == 0 && rhs == 0
	   && !TREE_CONSTANT (arg)
	   && (*lang_hooks.decls.global_bindings_p) () == 0
	   && ((TREE_CODE (arg) != VAR_DECL && TREE_CODE (arg) != PARM_DECL)
	       || TREE_SIDE_EFFECTS (arg)))
    {
      if (TREE_CODE (true_value) != COND_EXPR)
	lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));

      if (TREE_CODE (false_value) != COND_EXPR)
	rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));

      /* Neither arm folded to a constant: wrap ARG in a SAVE_EXPR and
	 rebuild both arms below using the saved operand.  */
      if ((lhs == 0 || ! TREE_CONSTANT (lhs))
	  && (rhs == 0 || !TREE_CONSTANT (rhs)))
	{
	  arg = save_expr (arg);
	  lhs = rhs = 0;
	  save = 1;
	}
    }

  if (lhs == 0)
    lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
  if (rhs == 0)
    rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));

  test = fold (build (COND_EXPR, type, test, lhs, rhs));

  /* When a SAVE_EXPR was made, evaluate ARG first for its side effects
     and strip the now-redundant COMPOUND_EXPRs from the result.  */
  if (save)
    return build (COMPOUND_EXPR, type,
		  convert (void_type_node, arg),
		  strip_compound_expr (test, arg));
  else
    return convert (type, test);
}
4619
4620 \f
4621 /* Subroutine of fold() that checks for the addition of +/- 0.0.
4622
4623 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
4624 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
4625 ADDEND is the same as X.
4626
4627 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
4628 and finite. The problematic cases are when X is zero, and its mode
4629 has signed zeros. In the case of rounding towards -infinity,
4630 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
4631 modes, X + 0 is not the same as X because -0 + 0 is 0. */
4632
4633 static bool
4634 fold_real_zero_addition_p (tree type, tree addend, int negate)
4635 {
4636 if (!real_zerop (addend))
4637 return false;
4638
4639 /* Don't allow the fold with -fsignaling-nans. */
4640 if (HONOR_SNANS (TYPE_MODE (type)))
4641 return false;
4642
4643 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
4644 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
4645 return true;
4646
4647 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
4648 if (TREE_CODE (addend) == REAL_CST
4649 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
4650 negate = !negate;
4651
4652 /* The mode has signed zeros, and we have to honor their sign.
4653 In this situation, there is only one case we can return true for.
4654 X - 0 is the same as X unless rounding towards -infinity is
4655 supported. */
4656 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
4657 }
4658
/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
		     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (fcode == BUILT_IN_SQRT
      || fcode == BUILT_IN_SQRTF
      || fcode == BUILT_IN_SQRTL)
    {
      /* ARG is the (first) argument of the sqrt call in ARG0.  */
      tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
	{
	  /* sqrt(x) < y is always false, if y is negative.  */
	  if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
	    return omit_one_operand (type,
				     convert (type, integer_zero_node),
				     arg);

	  /* sqrt(x) > y is always true, if y is negative and we
	     don't care about NaNs, i.e. negative values of x.  */
	  if (code == NE_EXPR || !HONOR_NANS (mode))
	    return omit_one_operand (type,
				     convert (type, integer_one_node),
				     arg);

	  /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
	  return fold (build (GE_EXPR, type, arg,
			      build_real (TREE_TYPE (arg), dconst0)));
	}
      else if (code == GT_EXPR || code == GE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  /* Square C (rounded to the comparison mode) so the sqrt can
	     be removed and X compared against C*C directly.  */
	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) > y is x == +Inf, when y is very large.  */
	      if (HONOR_INFINITIES (mode))
		return fold (build (EQ_EXPR, type, arg,
				    build_real (TREE_TYPE (arg), c2)));

	      /* sqrt(x) > y is always false, when y is very large
		 and we don't care about infinities.  */
	      return omit_one_operand (type,
				       convert (type, integer_zero_node),
				       arg);
	    }

	  /* sqrt(x) > c is the same as x > c*c.  */
	  return fold (build (code, type, arg,
			      build_real (TREE_TYPE (arg), c2)));
	}
      else if (code == LT_EXPR || code == LE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) < y is always true, when y is a very large
		 value and we don't care about NaNs or Infinities.  */
	      if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
		return omit_one_operand (type,
					 convert (type, integer_one_node),
					 arg);

	      /* sqrt(x) < y is x != +Inf when y is very large and we
		 don't care about NaNs.  */
	      if (! HONOR_NANS (mode))
		return fold (build (NE_EXPR, type, arg,
				    build_real (TREE_TYPE (arg), c2)));

	      /* sqrt(x) < y is x >= 0 when y is very large and we
		 don't care about Infinities.  */
	      if (! HONOR_INFINITIES (mode))
		return fold (build (GE_EXPR, type, arg,
				    build_real (TREE_TYPE (arg), dconst0)));

	      /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
	      if ((*lang_hooks.decls.global_bindings_p) () != 0
		  || CONTAINS_PLACEHOLDER_P (arg))
		return NULL_TREE;

	      /* ARG appears twice below, so make sure it is only
		 evaluated once.  */
	      arg = save_expr (arg);
	      return fold (build (TRUTH_ANDIF_EXPR, type,
				  fold (build (GE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   dconst0))),
				  fold (build (NE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   c2)))));
	    }

	  /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
	  if (! HONOR_NANS (mode))
	    return fold (build (code, type, arg,
				build_real (TREE_TYPE (arg), c2)));

	  /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
	  if ((*lang_hooks.decls.global_bindings_p) () == 0
	      && ! CONTAINS_PLACEHOLDER_P (arg))
	    {
	      arg = save_expr (arg);
	      return fold (build (TRUTH_ANDIF_EXPR, type,
				  fold (build (GE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   dconst0))),
				  fold (build (code, type, arg,
					       build_real (TREE_TYPE (arg),
							   c2)))));
	    }
	}
    }

  return NULL_TREE;
}
4794
/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
	return NULL_TREE;
      return omit_one_operand (type,
			       convert (type, integer_zero_node),
			       arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
	return omit_one_operand (type,
				 convert (type, integer_one_node),
				 arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
      if ((*lang_hooks.decls.global_bindings_p) () == 0
	  && ! CONTAINS_PLACEHOLDER_P (arg0))
	{
	  /* ARG0 is used twice; evaluate it only once.  */
	  arg0 = save_expr (arg0);
	  return fold (build (EQ_EXPR, type, arg0, arg0));
	}
      break;

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold (build (neg ? LT_EXPR : GT_EXPR, type,
			  arg0, build_real (TREE_TYPE (arg0), max)));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold (build (neg ? GE_EXPR : LE_EXPR, type,
			  arg0, build_real (TREE_TYPE (arg0), max)));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
	return fold (build (neg ? GE_EXPR : LE_EXPR, type,
			    arg0, build_real (TREE_TYPE (arg0), max)));
      temp = fold (build (neg ? LT_EXPR : GT_EXPR, type,
			  arg0, build_real (TREE_TYPE (arg0), max)));
      return fold (build1 (TRUTH_NOT_EXPR, type, temp));

    default:
      break;
    }

  return NULL_TREE;
}
4875
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.

   A "single bit test" here is (A & C) == 0 or (A & C) != 0 where C is
   a power of two, possibly wrapped in a TRUTH_NOT_EXPR.  */

tree
fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
		      tree result_type)
{
  /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
     operand 0.  */
  if (code == TRUTH_NOT_EXPR)
    {
      code = TREE_CODE (arg0);
      if (code != NE_EXPR && code != EQ_EXPR)
	return NULL_TREE;

      /* Extract the arguments of the EQ/NE.  */
      arg1 = TREE_OPERAND (arg0, 1);
      arg0 = TREE_OPERAND (arg0, 0);

      /* This requires us to invert the code: !(A == B) is A != B.  */
      code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
    }

  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      /* BITNUM is the index of the single bit being tested, i.e. log2
	 of the power-of-two mask.  */
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type;
      tree arg00;

      /* If we have (A & C) != 0 where C is the sign bit of A, convert
	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
      if (arg00 != NULL_TREE)
	{
	  /* The comparison must be done in a signed type for the < 0 /
	     >= 0 trick to test the sign bit.  */
	  tree stype = (*lang_hooks.types.signed_type) (TREE_TYPE (arg00));
	  return fold (build (code == EQ_EXPR ? GE_EXPR : LT_EXPR, result_type,
			      convert (stype, arg00),
			      convert (stype, integer_zero_node)));
	}

      /* At this point, we know that arg0 is not testing the sign bit.  */
      if (TYPE_PRECISION (type) - 1 == bitnum)
	abort ();

      /* Otherwise we have (A & C) != 0 where C is a single bit,
	 convert that into ((A >> C2) & 1).  Where C2 = log2(C).
	 Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER, folding the two shifts
	 into one.
	 NOTE(review): BITNUM - TYPE_PRECISION (TYPE) is negative here
	 (BITNUM < TYPE_PRECISION), so compare_tree_int sees it after
	 conversion to an unsigned HOST_WIDE_INT -- confirm this wraparound
	 is the intended overflow guard.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
#else
      ops_unsigned = 1;
#endif

      signed_type = (*lang_hooks.types.type_for_mode) (operand_mode, 0);
      unsigned_type = (*lang_hooks.types.type_for_mode) (operand_mode, 1);

      /* Shift the tested bit down to bit 0.  */
      if (bitnum != 0)
	inner = build (RSHIFT_EXPR, ops_unsigned ? unsigned_type : signed_type,
		       inner, size_int (bitnum));

      /* For the == 0 form, invert bit 0 so the final AND yields the
	 truth value directly.  */
      if (code == EQ_EXPR)
	inner = build (BIT_XOR_EXPR, ops_unsigned ? unsigned_type : signed_type,
		       inner, integer_one_node);

      /* Put the AND last so it can combine with more things.  */
      inner = build (BIT_AND_EXPR, ops_unsigned ? unsigned_type : signed_type,
		     inner, integer_one_node);

      /* Make sure to return the proper type.  */
      if (TREE_TYPE (inner) != result_type)
	inner = convert (result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
4978
4979 /* Perform constant folding and related simplification of EXPR.
4980 The related simplifications include x*1 => x, x*0 => 0, etc.,
4981 and application of the associative law.
4982 NOP_EXPR conversions may be removed freely (as long as we
4983 are careful not to change the C type of the overall expression)
4984 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
4985 but we can constant-fold them if they have constant operands. */
4986
4987 #ifdef ENABLE_FOLD_CHECKING
4988 # define fold(x) fold_1 (x)
4989 static tree fold_1 (tree);
4990 static
4991 #endif
4992 tree
4993 fold (tree expr)
4994 {
4995 tree t = expr, orig_t;
4996 tree t1 = NULL_TREE;
4997 tree tem;
4998 tree type = TREE_TYPE (expr);
4999 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
5000 enum tree_code code = TREE_CODE (t);
5001 int kind = TREE_CODE_CLASS (code);
5002 int invert;
5003 /* WINS will be nonzero when the switch is done
5004 if all operands are constant. */
5005 int wins = 1;
5006
5007 /* Don't try to process an RTL_EXPR since its operands aren't trees.
5008 Likewise for a SAVE_EXPR that's already been evaluated. */
5009 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
5010 return t;
5011
5012 /* Return right away if a constant. */
5013 if (kind == 'c')
5014 return t;
5015
5016 #ifdef MAX_INTEGER_COMPUTATION_MODE
5017 check_max_integer_computation_mode (expr);
5018 #endif
5019 orig_t = t;
5020
5021 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
5022 {
5023 tree subop;
5024
5025 /* Special case for conversion ops that can have fixed point args. */
5026 arg0 = TREE_OPERAND (t, 0);
5027
5028 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
5029 if (arg0 != 0)
5030 STRIP_SIGN_NOPS (arg0);
5031
5032 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
5033 subop = TREE_REALPART (arg0);
5034 else
5035 subop = arg0;
5036
5037 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
5038 && TREE_CODE (subop) != REAL_CST
5039 )
5040 /* Note that TREE_CONSTANT isn't enough:
5041 static var addresses are constant but we can't
5042 do arithmetic on them. */
5043 wins = 0;
5044 }
5045 else if (IS_EXPR_CODE_CLASS (kind))
5046 {
5047 int len = first_rtl_op (code);
5048 int i;
5049 for (i = 0; i < len; i++)
5050 {
5051 tree op = TREE_OPERAND (t, i);
5052 tree subop;
5053
5054 if (op == 0)
5055 continue; /* Valid for CALL_EXPR, at least. */
5056
5057 if (kind == '<' || code == RSHIFT_EXPR)
5058 {
5059 /* Signedness matters here. Perhaps we can refine this
5060 later. */
5061 STRIP_SIGN_NOPS (op);
5062 }
5063 else
5064 /* Strip any conversions that don't change the mode. */
5065 STRIP_NOPS (op);
5066
5067 if (TREE_CODE (op) == COMPLEX_CST)
5068 subop = TREE_REALPART (op);
5069 else
5070 subop = op;
5071
5072 if (TREE_CODE (subop) != INTEGER_CST
5073 && TREE_CODE (subop) != REAL_CST)
5074 /* Note that TREE_CONSTANT isn't enough:
5075 static var addresses are constant but we can't
5076 do arithmetic on them. */
5077 wins = 0;
5078
5079 if (i == 0)
5080 arg0 = op;
5081 else if (i == 1)
5082 arg1 = op;
5083 }
5084 }
5085
5086 /* If this is a commutative operation, and ARG0 is a constant, move it
5087 to ARG1 to reduce the number of tests below. */
5088 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
5089 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
5090 || code == BIT_AND_EXPR)
5091 && ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) != INTEGER_CST)
5092 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) != REAL_CST)))
5093 {
5094 tem = arg0; arg0 = arg1; arg1 = tem;
5095
5096 if (t == orig_t)
5097 t = copy_node (t);
5098 TREE_OPERAND (t, 0) = arg0;
5099 TREE_OPERAND (t, 1) = arg1;
5100 }
5101
5102 /* Now WINS is set as described above,
5103 ARG0 is the first operand of EXPR,
5104 and ARG1 is the second operand (if it has more than one operand).
5105
5106 First check for cases where an arithmetic operation is applied to a
5107 compound, conditional, or comparison operation. Push the arithmetic
5108 operation inside the compound or conditional to see if any folding
5109 can then be done. Convert comparison to conditional for this purpose.
5110 The also optimizes non-constant cases that used to be done in
5111 expand_expr.
5112
5113 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
5114 one of the operands is a comparison and the other is a comparison, a
5115 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
5116 code below would make the expression more complex. Change it to a
5117 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
5118 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
5119
5120 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
5121 || code == EQ_EXPR || code == NE_EXPR)
5122 && ((truth_value_p (TREE_CODE (arg0))
5123 && (truth_value_p (TREE_CODE (arg1))
5124 || (TREE_CODE (arg1) == BIT_AND_EXPR
5125 && integer_onep (TREE_OPERAND (arg1, 1)))))
5126 || (truth_value_p (TREE_CODE (arg1))
5127 && (truth_value_p (TREE_CODE (arg0))
5128 || (TREE_CODE (arg0) == BIT_AND_EXPR
5129 && integer_onep (TREE_OPERAND (arg0, 1)))))))
5130 {
5131 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
5132 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
5133 : TRUTH_XOR_EXPR,
5134 type, arg0, arg1));
5135
5136 if (code == EQ_EXPR)
5137 t = invert_truthvalue (t);
5138
5139 return t;
5140 }
5141
5142 if (TREE_CODE_CLASS (code) == '1')
5143 {
5144 if (TREE_CODE (arg0) == COMPOUND_EXPR)
5145 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5146 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
5147 else if (TREE_CODE (arg0) == COND_EXPR)
5148 {
5149 tree arg01 = TREE_OPERAND (arg0, 1);
5150 tree arg02 = TREE_OPERAND (arg0, 2);
5151 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
5152 arg01 = fold (build1 (code, type, arg01));
5153 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
5154 arg02 = fold (build1 (code, type, arg02));
5155 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
5156 arg01, arg02));
5157
5158 /* If this was a conversion, and all we did was to move into
5159 inside the COND_EXPR, bring it back out. But leave it if
5160 it is a conversion from integer to integer and the
5161 result precision is no wider than a word since such a
5162 conversion is cheap and may be optimized away by combine,
5163 while it couldn't if it were outside the COND_EXPR. Then return
5164 so we don't get into an infinite recursion loop taking the
5165 conversion out and then back in. */
5166
5167 if ((code == NOP_EXPR || code == CONVERT_EXPR
5168 || code == NON_LVALUE_EXPR)
5169 && TREE_CODE (t) == COND_EXPR
5170 && TREE_CODE (TREE_OPERAND (t, 1)) == code
5171 && TREE_CODE (TREE_OPERAND (t, 2)) == code
5172 && ! VOID_TYPE_P (TREE_OPERAND (t, 1))
5173 && ! VOID_TYPE_P (TREE_OPERAND (t, 2))
5174 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
5175 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
5176 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
5177 && (INTEGRAL_TYPE_P
5178 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))))
5179 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
5180 t = build1 (code, type,
5181 build (COND_EXPR,
5182 TREE_TYPE (TREE_OPERAND
5183 (TREE_OPERAND (t, 1), 0)),
5184 TREE_OPERAND (t, 0),
5185 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
5186 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
5187 return t;
5188 }
5189 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
5190 return fold (build (COND_EXPR, type, arg0,
5191 fold (build1 (code, type, integer_one_node)),
5192 fold (build1 (code, type, integer_zero_node))));
5193 }
5194 else if (TREE_CODE_CLASS (code) == '<'
5195 && TREE_CODE (arg0) == COMPOUND_EXPR)
5196 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5197 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5198 else if (TREE_CODE_CLASS (code) == '<'
5199 && TREE_CODE (arg1) == COMPOUND_EXPR)
5200 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5201 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
5202 else if (TREE_CODE_CLASS (code) == '2'
5203 || TREE_CODE_CLASS (code) == '<')
5204 {
5205 if (TREE_CODE (arg1) == COMPOUND_EXPR
5206 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg1, 0))
5207 && ! TREE_SIDE_EFFECTS (arg0))
5208 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5209 fold (build (code, type,
5210 arg0, TREE_OPERAND (arg1, 1))));
5211 else if ((TREE_CODE (arg1) == COND_EXPR
5212 || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
5213 && TREE_CODE_CLASS (code) != '<'))
5214 && (TREE_CODE (arg0) != COND_EXPR
5215 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5216 && (! TREE_SIDE_EFFECTS (arg0)
5217 || ((*lang_hooks.decls.global_bindings_p) () == 0
5218 && ! CONTAINS_PLACEHOLDER_P (arg0))))
5219 return
5220 fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
5221 /*cond_first_p=*/0);
5222 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
5223 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5224 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5225 else if ((TREE_CODE (arg0) == COND_EXPR
5226 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
5227 && TREE_CODE_CLASS (code) != '<'))
5228 && (TREE_CODE (arg1) != COND_EXPR
5229 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5230 && (! TREE_SIDE_EFFECTS (arg1)
5231 || ((*lang_hooks.decls.global_bindings_p) () == 0
5232 && ! CONTAINS_PLACEHOLDER_P (arg1))))
5233 return
5234 fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
5235 /*cond_first_p=*/1);
5236 }
5237
5238 switch (code)
5239 {
5240 case INTEGER_CST:
5241 case REAL_CST:
5242 case VECTOR_CST:
5243 case STRING_CST:
5244 case COMPLEX_CST:
5245 case CONSTRUCTOR:
5246 return t;
5247
5248 case CONST_DECL:
5249 return fold (DECL_INITIAL (t));
5250
5251 case NOP_EXPR:
5252 case FLOAT_EXPR:
5253 case CONVERT_EXPR:
5254 case FIX_TRUNC_EXPR:
5255 /* Other kinds of FIX are not handled properly by fold_convert. */
5256
5257 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
5258 return TREE_OPERAND (t, 0);
5259
5260 /* Handle cases of two conversions in a row. */
5261 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
5262 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
5263 {
5264 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5265 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
5266 tree final_type = TREE_TYPE (t);
5267 int inside_int = INTEGRAL_TYPE_P (inside_type);
5268 int inside_ptr = POINTER_TYPE_P (inside_type);
5269 int inside_float = FLOAT_TYPE_P (inside_type);
5270 unsigned int inside_prec = TYPE_PRECISION (inside_type);
5271 int inside_unsignedp = TREE_UNSIGNED (inside_type);
5272 int inter_int = INTEGRAL_TYPE_P (inter_type);
5273 int inter_ptr = POINTER_TYPE_P (inter_type);
5274 int inter_float = FLOAT_TYPE_P (inter_type);
5275 unsigned int inter_prec = TYPE_PRECISION (inter_type);
5276 int inter_unsignedp = TREE_UNSIGNED (inter_type);
5277 int final_int = INTEGRAL_TYPE_P (final_type);
5278 int final_ptr = POINTER_TYPE_P (final_type);
5279 int final_float = FLOAT_TYPE_P (final_type);
5280 unsigned int final_prec = TYPE_PRECISION (final_type);
5281 int final_unsignedp = TREE_UNSIGNED (final_type);
5282
5283 /* In addition to the cases of two conversions in a row
5284 handled below, if we are converting something to its own
5285 type via an object of identical or wider precision, neither
5286 conversion is needed. */
5287 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (final_type)
5288 && ((inter_int && final_int) || (inter_float && final_float))
5289 && inter_prec >= final_prec)
5290 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5291
5292 /* Likewise, if the intermediate and final types are either both
5293 float or both integer, we don't need the middle conversion if
5294 it is wider than the final type and doesn't change the signedness
5295 (for integers). Avoid this if the final type is a pointer
5296 since then we sometimes need the inner conversion. Likewise if
5297 the outer has a precision not equal to the size of its mode. */
5298 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
5299 || (inter_float && inside_float))
5300 && inter_prec >= inside_prec
5301 && (inter_float || inter_unsignedp == inside_unsignedp)
5302 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5303 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5304 && ! final_ptr)
5305 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5306
5307 /* If we have a sign-extension of a zero-extended value, we can
5308 replace that by a single zero-extension. */
5309 if (inside_int && inter_int && final_int
5310 && inside_prec < inter_prec && inter_prec < final_prec
5311 && inside_unsignedp && !inter_unsignedp)
5312 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5313
5314 /* Two conversions in a row are not needed unless:
5315 - some conversion is floating-point (overstrict for now), or
5316 - the intermediate type is narrower than both initial and
5317 final, or
5318 - the intermediate type and innermost type differ in signedness,
5319 and the outermost type is wider than the intermediate, or
5320 - the initial type is a pointer type and the precisions of the
5321 intermediate and final types differ, or
5322 - the final type is a pointer type and the precisions of the
5323 initial and intermediate types differ. */
5324 if (! inside_float && ! inter_float && ! final_float
5325 && (inter_prec > inside_prec || inter_prec > final_prec)
5326 && ! (inside_int && inter_int
5327 && inter_unsignedp != inside_unsignedp
5328 && inter_prec < final_prec)
5329 && ((inter_unsignedp && inter_prec > inside_prec)
5330 == (final_unsignedp && final_prec > inter_prec))
5331 && ! (inside_ptr && inter_prec != final_prec)
5332 && ! (final_ptr && inside_prec != inter_prec)
5333 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5334 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5335 && ! final_ptr)
5336 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5337 }
5338
5339 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
5340 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
5341 /* Detect assigning a bitfield. */
5342 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
5343 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
5344 {
5345 /* Don't leave an assignment inside a conversion
5346 unless assigning a bitfield. */
5347 tree prev = TREE_OPERAND (t, 0);
5348 if (t == orig_t)
5349 t = copy_node (t);
5350 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
5351 /* First do the assignment, then return converted constant. */
5352 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
5353 TREE_USED (t) = 1;
5354 return t;
5355 }
5356
5357 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
5358 constants (if x has signed type, the sign bit cannot be set
5359 in c). This folds extension into the BIT_AND_EXPR. */
5360 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
5361 && TREE_CODE (TREE_TYPE (t)) != BOOLEAN_TYPE
5362 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
5363 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
5364 {
5365 tree and = TREE_OPERAND (t, 0);
5366 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
5367 int change = 0;
5368
5369 if (TREE_UNSIGNED (TREE_TYPE (and))
5370 || (TYPE_PRECISION (TREE_TYPE (t))
5371 <= TYPE_PRECISION (TREE_TYPE (and))))
5372 change = 1;
5373 else if (TYPE_PRECISION (TREE_TYPE (and1))
5374 <= HOST_BITS_PER_WIDE_INT
5375 && host_integerp (and1, 1))
5376 {
5377 unsigned HOST_WIDE_INT cst;
5378
5379 cst = tree_low_cst (and1, 1);
5380 cst &= (HOST_WIDE_INT) -1
5381 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
5382 change = (cst == 0);
5383 #ifdef LOAD_EXTEND_OP
5384 if (change
5385 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
5386 == ZERO_EXTEND))
5387 {
5388 tree uns = (*lang_hooks.types.unsigned_type) (TREE_TYPE (and0));
5389 and0 = convert (uns, and0);
5390 and1 = convert (uns, and1);
5391 }
5392 #endif
5393 }
5394 if (change)
5395 return fold (build (BIT_AND_EXPR, TREE_TYPE (t),
5396 convert (TREE_TYPE (t), and0),
5397 convert (TREE_TYPE (t), and1)));
5398 }
5399
5400 if (!wins)
5401 {
5402 if (TREE_CONSTANT (t) != TREE_CONSTANT (arg0))
5403 {
5404 if (t == orig_t)
5405 t = copy_node (t);
5406 TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
5407 }
5408 return t;
5409 }
5410 return fold_convert (t, arg0);
5411
5412 case VIEW_CONVERT_EXPR:
5413 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
5414 return build1 (VIEW_CONVERT_EXPR, type,
5415 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5416 return t;
5417
5418 case COMPONENT_REF:
5419 if (TREE_CODE (arg0) == CONSTRUCTOR
5420 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
5421 {
5422 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
5423 if (m)
5424 t = TREE_VALUE (m);
5425 }
5426 return t;
5427
5428 case RANGE_EXPR:
5429 if (TREE_CONSTANT (t) != wins)
5430 {
5431 if (t == orig_t)
5432 t = copy_node (t);
5433 TREE_CONSTANT (t) = wins;
5434 }
5435 return t;
5436
5437 case NEGATE_EXPR:
5438 if (wins)
5439 {
5440 if (TREE_CODE (arg0) == INTEGER_CST)
5441 {
5442 unsigned HOST_WIDE_INT low;
5443 HOST_WIDE_INT high;
5444 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5445 TREE_INT_CST_HIGH (arg0),
5446 &low, &high);
5447 t = build_int_2 (low, high);
5448 TREE_TYPE (t) = type;
5449 TREE_OVERFLOW (t)
5450 = (TREE_OVERFLOW (arg0)
5451 | force_fit_type (t, overflow && !TREE_UNSIGNED (type)));
5452 TREE_CONSTANT_OVERFLOW (t)
5453 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5454 }
5455 else if (TREE_CODE (arg0) == REAL_CST)
5456 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5457 }
5458 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5459 return TREE_OPERAND (arg0, 0);
5460 /* Convert -((double)float) into (double)(-float). */
5461 else if (TREE_CODE (arg0) == NOP_EXPR
5462 && TREE_CODE (type) == REAL_TYPE)
5463 {
5464 tree targ0 = strip_float_extensions (arg0);
5465 if (targ0 != arg0)
5466 return convert (type, build1 (NEGATE_EXPR, TREE_TYPE (targ0), targ0));
5467
5468 }
5469
5470 /* Convert - (a - b) to (b - a) for non-floating-point. */
5471 else if (TREE_CODE (arg0) == MINUS_EXPR
5472 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
5473 return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
5474 TREE_OPERAND (arg0, 0));
5475
5476 /* Convert -f(x) into f(-x) where f is sin, tan or atan. */
5477 switch (builtin_mathfn_code (arg0))
5478 {
5479 case BUILT_IN_SIN:
5480 case BUILT_IN_SINF:
5481 case BUILT_IN_SINL:
5482 case BUILT_IN_TAN:
5483 case BUILT_IN_TANF:
5484 case BUILT_IN_TANL:
5485 case BUILT_IN_ATAN:
5486 case BUILT_IN_ATANF:
5487 case BUILT_IN_ATANL:
5488 if (negate_expr_p (TREE_VALUE (TREE_OPERAND (arg0, 1))))
5489 {
5490 tree fndecl, arg, arglist;
5491
5492 fndecl = get_callee_fndecl (arg0);
5493 arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5494 arg = fold (build1 (NEGATE_EXPR, type, arg));
5495 arglist = build_tree_list (NULL_TREE, arg);
5496 return build_function_call_expr (fndecl, arglist);
5497 }
5498 break;
5499
5500 default:
5501 break;
5502 }
5503 return t;
5504
5505 case ABS_EXPR:
5506 if (wins)
5507 {
5508 if (TREE_CODE (arg0) == INTEGER_CST)
5509 {
5510 /* If the value is unsigned, then the absolute value is
5511 the same as the ordinary value. */
5512 if (TREE_UNSIGNED (type))
5513 return arg0;
5514 /* Similarly, if the value is non-negative. */
5515 else if (INT_CST_LT (integer_minus_one_node, arg0))
5516 return arg0;
5517 /* If the value is negative, then the absolute value is
5518 its negation. */
5519 else
5520 {
5521 unsigned HOST_WIDE_INT low;
5522 HOST_WIDE_INT high;
5523 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5524 TREE_INT_CST_HIGH (arg0),
5525 &low, &high);
5526 t = build_int_2 (low, high);
5527 TREE_TYPE (t) = type;
5528 TREE_OVERFLOW (t)
5529 = (TREE_OVERFLOW (arg0)
5530 | force_fit_type (t, overflow));
5531 TREE_CONSTANT_OVERFLOW (t)
5532 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5533 }
5534 }
5535 else if (TREE_CODE (arg0) == REAL_CST)
5536 {
5537 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
5538 t = build_real (type,
5539 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5540 }
5541 }
5542 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5543 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
5544 /* Convert fabs((double)float) into (double)fabsf(float). */
5545 else if (TREE_CODE (arg0) == NOP_EXPR
5546 && TREE_CODE (type) == REAL_TYPE)
5547 {
5548 tree targ0 = strip_float_extensions (arg0);
5549 if (targ0 != arg0)
5550 return convert (type, fold (build1 (ABS_EXPR, TREE_TYPE (targ0),
5551 targ0)));
5552 }
5553 else if (tree_expr_nonnegative_p (arg0))
5554 return arg0;
5555 return t;
5556
5557 case CONJ_EXPR:
5558 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5559 return convert (type, arg0);
5560 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5561 return build (COMPLEX_EXPR, type,
5562 TREE_OPERAND (arg0, 0),
5563 negate_expr (TREE_OPERAND (arg0, 1)));
5564 else if (TREE_CODE (arg0) == COMPLEX_CST)
5565 return build_complex (type, TREE_REALPART (arg0),
5566 negate_expr (TREE_IMAGPART (arg0)));
5567 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5568 return fold (build (TREE_CODE (arg0), type,
5569 fold (build1 (CONJ_EXPR, type,
5570 TREE_OPERAND (arg0, 0))),
5571 fold (build1 (CONJ_EXPR,
5572 type, TREE_OPERAND (arg0, 1)))));
5573 else if (TREE_CODE (arg0) == CONJ_EXPR)
5574 return TREE_OPERAND (arg0, 0);
5575 return t;
5576
5577 case BIT_NOT_EXPR:
5578 if (wins)
5579 {
5580 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
5581 ~ TREE_INT_CST_HIGH (arg0));
5582 TREE_TYPE (t) = type;
5583 force_fit_type (t, 0);
5584 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
5585 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
5586 }
5587 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
5588 return TREE_OPERAND (arg0, 0);
5589 return t;
5590
5591 case PLUS_EXPR:
5592 /* A + (-B) -> A - B */
5593 if (TREE_CODE (arg1) == NEGATE_EXPR)
5594 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5595 /* (-A) + B -> B - A */
5596 if (TREE_CODE (arg0) == NEGATE_EXPR)
5597 return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
5598 else if (! FLOAT_TYPE_P (type))
5599 {
5600 if (integer_zerop (arg1))
5601 return non_lvalue (convert (type, arg0));
5602
5603 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
5604 with a constant, and the two constants have no bits in common,
5605 we should treat this as a BIT_IOR_EXPR since this may produce more
5606 simplifications. */
5607 if (TREE_CODE (arg0) == BIT_AND_EXPR
5608 && TREE_CODE (arg1) == BIT_AND_EXPR
5609 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5610 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5611 && integer_zerop (const_binop (BIT_AND_EXPR,
5612 TREE_OPERAND (arg0, 1),
5613 TREE_OPERAND (arg1, 1), 0)))
5614 {
5615 code = BIT_IOR_EXPR;
5616 goto bit_ior;
5617 }
5618
5619 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
5620 (plus (plus (mult) (mult)) (foo)) so that we can
5621 take advantage of the factoring cases below. */
5622 if ((TREE_CODE (arg0) == PLUS_EXPR
5623 && TREE_CODE (arg1) == MULT_EXPR)
5624 || (TREE_CODE (arg1) == PLUS_EXPR
5625 && TREE_CODE (arg0) == MULT_EXPR))
5626 {
5627 tree parg0, parg1, parg, marg;
5628
5629 if (TREE_CODE (arg0) == PLUS_EXPR)
5630 parg = arg0, marg = arg1;
5631 else
5632 parg = arg1, marg = arg0;
5633 parg0 = TREE_OPERAND (parg, 0);
5634 parg1 = TREE_OPERAND (parg, 1);
5635 STRIP_NOPS (parg0);
5636 STRIP_NOPS (parg1);
5637
5638 if (TREE_CODE (parg0) == MULT_EXPR
5639 && TREE_CODE (parg1) != MULT_EXPR)
5640 return fold (build (PLUS_EXPR, type,
5641 fold (build (PLUS_EXPR, type,
5642 convert (type, parg0),
5643 convert (type, marg))),
5644 convert (type, parg1)));
5645 if (TREE_CODE (parg0) != MULT_EXPR
5646 && TREE_CODE (parg1) == MULT_EXPR)
5647 return fold (build (PLUS_EXPR, type,
5648 fold (build (PLUS_EXPR, type,
5649 convert (type, parg1),
5650 convert (type, marg))),
5651 convert (type, parg0)));
5652 }
5653
5654 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
5655 {
5656 tree arg00, arg01, arg10, arg11;
5657 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
5658
5659 /* (A * C) + (B * C) -> (A+B) * C.
5660 We are most concerned about the case where C is a constant,
5661 but other combinations show up during loop reduction. Since
5662 it is not difficult, try all four possibilities. */
5663
5664 arg00 = TREE_OPERAND (arg0, 0);
5665 arg01 = TREE_OPERAND (arg0, 1);
5666 arg10 = TREE_OPERAND (arg1, 0);
5667 arg11 = TREE_OPERAND (arg1, 1);
5668 same = NULL_TREE;
5669
5670 if (operand_equal_p (arg01, arg11, 0))
5671 same = arg01, alt0 = arg00, alt1 = arg10;
5672 else if (operand_equal_p (arg00, arg10, 0))
5673 same = arg00, alt0 = arg01, alt1 = arg11;
5674 else if (operand_equal_p (arg00, arg11, 0))
5675 same = arg00, alt0 = arg01, alt1 = arg10;
5676 else if (operand_equal_p (arg01, arg10, 0))
5677 same = arg01, alt0 = arg00, alt1 = arg11;
5678
5679 /* No identical multiplicands; see if we can find a common
5680 power-of-two factor in non-power-of-two multiplies. This
5681 can help in multi-dimensional array access. */
5682 else if (TREE_CODE (arg01) == INTEGER_CST
5683 && TREE_CODE (arg11) == INTEGER_CST
5684 && TREE_INT_CST_HIGH (arg01) == 0
5685 && TREE_INT_CST_HIGH (arg11) == 0)
5686 {
5687 HOST_WIDE_INT int01, int11, tmp;
5688 int01 = TREE_INT_CST_LOW (arg01);
5689 int11 = TREE_INT_CST_LOW (arg11);
5690
5691 /* Move min of absolute values to int11. */
5692 if ((int01 >= 0 ? int01 : -int01)
5693 < (int11 >= 0 ? int11 : -int11))
5694 {
5695 tmp = int01, int01 = int11, int11 = tmp;
5696 alt0 = arg00, arg00 = arg10, arg10 = alt0;
5697 alt0 = arg01, arg01 = arg11, arg11 = alt0;
5698 }
5699
5700 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
5701 {
5702 alt0 = fold (build (MULT_EXPR, type, arg00,
5703 build_int_2 (int01 / int11, 0)));
5704 alt1 = arg10;
5705 same = arg11;
5706 }
5707 }
5708
5709 if (same)
5710 return fold (build (MULT_EXPR, type,
5711 fold (build (PLUS_EXPR, type, alt0, alt1)),
5712 same));
5713 }
5714 }
5715 else
5716 {
5717 /* See if ARG1 is zero and X + ARG1 reduces to X. */
5718 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
5719 return non_lvalue (convert (type, arg0));
5720
5721 /* Likewise if the operands are reversed. */
5722 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
5723 return non_lvalue (convert (type, arg1));
5724
5725 /* Convert x+x into x*2.0. */
5726 if (operand_equal_p (arg0, arg1, 0)
5727 && SCALAR_FLOAT_TYPE_P (type))
5728 return fold (build (MULT_EXPR, type, arg0,
5729 build_real (type, dconst2)));
5730
5731 /* Convert x*c+x into x*(c+1). */
5732 if (flag_unsafe_math_optimizations
5733 && TREE_CODE (arg0) == MULT_EXPR
5734 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
5735 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
5736 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
5737 {
5738 REAL_VALUE_TYPE c;
5739
5740 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
5741 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
5742 return fold (build (MULT_EXPR, type, arg1,
5743 build_real (type, c)));
5744 }
5745
5746 /* Convert x+x*c into x*(c+1). */
5747 if (flag_unsafe_math_optimizations
5748 && TREE_CODE (arg1) == MULT_EXPR
5749 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
5750 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
5751 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
5752 {
5753 REAL_VALUE_TYPE c;
5754
5755 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
5756 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
5757 return fold (build (MULT_EXPR, type, arg0,
5758 build_real (type, c)));
5759 }
5760
5761 /* Convert x*c1+x*c2 into x*(c1+c2). */
5762 if (flag_unsafe_math_optimizations
5763 && TREE_CODE (arg0) == MULT_EXPR
5764 && TREE_CODE (arg1) == MULT_EXPR
5765 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
5766 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
5767 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
5768 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
5769 && operand_equal_p (TREE_OPERAND (arg0, 0),
5770 TREE_OPERAND (arg1, 0), 0))
5771 {
5772 REAL_VALUE_TYPE c1, c2;
5773
5774 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
5775 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
5776 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
5777 return fold (build (MULT_EXPR, type,
5778 TREE_OPERAND (arg0, 0),
5779 build_real (type, c1)));
5780 }
5781 }
5782
5783 bit_rotate:
5784 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
5785 is a rotate of A by C1 bits. */
5786 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
5787 is a rotate of A by B bits. */
5788 {
5789 enum tree_code code0, code1;
5790 code0 = TREE_CODE (arg0);
5791 code1 = TREE_CODE (arg1);
5792 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
5793 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
5794 && operand_equal_p (TREE_OPERAND (arg0, 0),
5795 TREE_OPERAND (arg1, 0), 0)
5796 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5797 {
5798 tree tree01, tree11;
5799 enum tree_code code01, code11;
5800
5801 tree01 = TREE_OPERAND (arg0, 1);
5802 tree11 = TREE_OPERAND (arg1, 1);
5803 STRIP_NOPS (tree01);
5804 STRIP_NOPS (tree11);
5805 code01 = TREE_CODE (tree01);
5806 code11 = TREE_CODE (tree11);
5807 if (code01 == INTEGER_CST
5808 && code11 == INTEGER_CST
5809 && TREE_INT_CST_HIGH (tree01) == 0
5810 && TREE_INT_CST_HIGH (tree11) == 0
5811 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
5812 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
5813 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
5814 code0 == LSHIFT_EXPR ? tree01 : tree11);
5815 else if (code11 == MINUS_EXPR)
5816 {
5817 tree tree110, tree111;
5818 tree110 = TREE_OPERAND (tree11, 0);
5819 tree111 = TREE_OPERAND (tree11, 1);
5820 STRIP_NOPS (tree110);
5821 STRIP_NOPS (tree111);
5822 if (TREE_CODE (tree110) == INTEGER_CST
5823 && 0 == compare_tree_int (tree110,
5824 TYPE_PRECISION
5825 (TREE_TYPE (TREE_OPERAND
5826 (arg0, 0))))
5827 && operand_equal_p (tree01, tree111, 0))
5828 return build ((code0 == LSHIFT_EXPR
5829 ? LROTATE_EXPR
5830 : RROTATE_EXPR),
5831 type, TREE_OPERAND (arg0, 0), tree01);
5832 }
5833 else if (code01 == MINUS_EXPR)
5834 {
5835 tree tree010, tree011;
5836 tree010 = TREE_OPERAND (tree01, 0);
5837 tree011 = TREE_OPERAND (tree01, 1);
5838 STRIP_NOPS (tree010);
5839 STRIP_NOPS (tree011);
5840 if (TREE_CODE (tree010) == INTEGER_CST
5841 && 0 == compare_tree_int (tree010,
5842 TYPE_PRECISION
5843 (TREE_TYPE (TREE_OPERAND
5844 (arg0, 0))))
5845 && operand_equal_p (tree11, tree011, 0))
5846 return build ((code0 != LSHIFT_EXPR
5847 ? LROTATE_EXPR
5848 : RROTATE_EXPR),
5849 type, TREE_OPERAND (arg0, 0), tree11);
5850 }
5851 }
5852 }
5853
5854 associate:
5855 /* In most languages, can't associate operations on floats through
5856 parentheses. Rather than remember where the parentheses were, we
5857 don't associate floats at all, unless the user has specified
5858 -funsafe-math-optimizations. */
5859
5860 if (! wins
5861 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
5862 {
5863 tree var0, con0, lit0, minus_lit0;
5864 tree var1, con1, lit1, minus_lit1;
5865
5866 /* Split both trees into variables, constants, and literals. Then
5867 associate each group together, the constants with literals,
5868 then the result with variables. This increases the chances of
5869 literals being recombined later and of generating relocatable
5870 expressions for the sum of a constant and literal. */
5871 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
5872 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
5873 code == MINUS_EXPR);
5874
5875 /* Only do something if we found more than two objects. Otherwise,
5876 nothing has changed and we risk infinite recursion. */
5877 if (2 < ((var0 != 0) + (var1 != 0)
5878 + (con0 != 0) + (con1 != 0)
5879 + (lit0 != 0) + (lit1 != 0)
5880 + (minus_lit0 != 0) + (minus_lit1 != 0)))
5881 {
5882 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
5883 if (code == MINUS_EXPR)
5884 code = PLUS_EXPR;
5885
5886 var0 = associate_trees (var0, var1, code, type);
5887 con0 = associate_trees (con0, con1, code, type);
5888 lit0 = associate_trees (lit0, lit1, code, type);
5889 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
5890
5891 /* Preserve the MINUS_EXPR if the negative part of the literal is
5892 greater than the positive part. Otherwise, the multiplicative
 5893                 folding code (i.e. extract_muldiv) may be fooled in case
5894 unsigned constants are subtracted, like in the following
5895 example: ((X*2 + 4) - 8U)/2. */
5896 if (minus_lit0 && lit0)
5897 {
5898 if (TREE_CODE (lit0) == INTEGER_CST
5899 && TREE_CODE (minus_lit0) == INTEGER_CST
5900 && tree_int_cst_lt (lit0, minus_lit0))
5901 {
5902 minus_lit0 = associate_trees (minus_lit0, lit0,
5903 MINUS_EXPR, type);
5904 lit0 = 0;
5905 }
5906 else
5907 {
5908 lit0 = associate_trees (lit0, minus_lit0,
5909 MINUS_EXPR, type);
5910 minus_lit0 = 0;
5911 }
5912 }
5913 if (minus_lit0)
5914 {
5915 if (con0 == 0)
5916 return convert (type, associate_trees (var0, minus_lit0,
5917 MINUS_EXPR, type));
5918 else
5919 {
5920 con0 = associate_trees (con0, minus_lit0,
5921 MINUS_EXPR, type);
5922 return convert (type, associate_trees (var0, con0,
5923 PLUS_EXPR, type));
5924 }
5925 }
5926
5927 con0 = associate_trees (con0, lit0, code, type);
5928 return convert (type, associate_trees (var0, con0, code, type));
5929 }
5930 }
5931
5932 binary:
5933 if (wins)
5934 t1 = const_binop (code, arg0, arg1, 0);
5935 if (t1 != NULL_TREE)
5936 {
5937 /* The return value should always have
5938 the same type as the original expression. */
5939 if (TREE_TYPE (t1) != TREE_TYPE (t))
5940 t1 = convert (TREE_TYPE (t), t1);
5941
5942 return t1;
5943 }
5944 return t;
5945
5946 case MINUS_EXPR:
5947 /* A - (-B) -> A + B */
5948 if (TREE_CODE (arg1) == NEGATE_EXPR)
5949 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5950 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
5951 if (TREE_CODE (arg0) == NEGATE_EXPR
5952 && (FLOAT_TYPE_P (type)
5953 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
5954 && negate_expr_p (arg1)
5955 && (! TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
5956 && (! TREE_SIDE_EFFECTS (arg1) || TREE_CONSTANT (arg0)))
5957 return fold (build (MINUS_EXPR, type, negate_expr (arg1),
5958 TREE_OPERAND (arg0, 0)));
5959
5960 if (! FLOAT_TYPE_P (type))
5961 {
5962 if (! wins && integer_zerop (arg0))
5963 return negate_expr (convert (type, arg1));
5964 if (integer_zerop (arg1))
5965 return non_lvalue (convert (type, arg0));
5966
5967 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
5968 about the case where C is a constant, just try one of the
5969 four possibilities. */
5970
5971 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
5972 && operand_equal_p (TREE_OPERAND (arg0, 1),
5973 TREE_OPERAND (arg1, 1), 0))
5974 return fold (build (MULT_EXPR, type,
5975 fold (build (MINUS_EXPR, type,
5976 TREE_OPERAND (arg0, 0),
5977 TREE_OPERAND (arg1, 0))),
5978 TREE_OPERAND (arg0, 1)));
5979
5980 /* Fold A - (A & B) into ~B & A. */
5981 if (!TREE_SIDE_EFFECTS (arg0)
5982 && TREE_CODE (arg1) == BIT_AND_EXPR)
5983 {
5984 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
5985 return fold (build (BIT_AND_EXPR, type,
5986 fold (build1 (BIT_NOT_EXPR, type,
5987 TREE_OPERAND (arg1, 0))),
5988 arg0));
5989 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
5990 return fold (build (BIT_AND_EXPR, type,
5991 fold (build1 (BIT_NOT_EXPR, type,
5992 TREE_OPERAND (arg1, 1))),
5993 arg0));
5994 }
5995 }
5996
5997 /* See if ARG1 is zero and X - ARG1 reduces to X. */
5998 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
5999 return non_lvalue (convert (type, arg0));
6000
6001 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
6002 ARG0 is zero and X + ARG0 reduces to X, since that would mean
6003 (-ARG1 + ARG0) reduces to -ARG1. */
6004 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6005 return negate_expr (convert (type, arg1));
6006
6007 /* Fold &x - &x. This can happen from &x.foo - &x.
6008 This is unsafe for certain floats even in non-IEEE formats.
6009 In IEEE, it is unsafe because it does wrong for NaNs.
6010 Also note that operand_equal_p is always false if an operand
6011 is volatile. */
6012
6013 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
6014 && operand_equal_p (arg0, arg1, 0))
6015 return convert (type, integer_zero_node);
6016
6017 goto associate;
6018
6019 case MULT_EXPR:
6020 /* (-A) * (-B) -> A * B */
6021 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6022 return fold (build (MULT_EXPR, type,
6023 TREE_OPERAND (arg0, 0),
6024 negate_expr (arg1)));
6025 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6026 return fold (build (MULT_EXPR, type,
6027 negate_expr (arg0),
6028 TREE_OPERAND (arg1, 0)));
6029
6030 if (! FLOAT_TYPE_P (type))
6031 {
6032 if (integer_zerop (arg1))
6033 return omit_one_operand (type, arg1, arg0);
6034 if (integer_onep (arg1))
6035 return non_lvalue (convert (type, arg0));
6036
6037 /* (a * (1 << b)) is (a << b) */
6038 if (TREE_CODE (arg1) == LSHIFT_EXPR
6039 && integer_onep (TREE_OPERAND (arg1, 0)))
6040 return fold (build (LSHIFT_EXPR, type, arg0,
6041 TREE_OPERAND (arg1, 1)));
6042 if (TREE_CODE (arg0) == LSHIFT_EXPR
6043 && integer_onep (TREE_OPERAND (arg0, 0)))
6044 return fold (build (LSHIFT_EXPR, type, arg1,
6045 TREE_OPERAND (arg0, 1)));
6046
6047 if (TREE_CODE (arg1) == INTEGER_CST
6048 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
6049 convert (type, arg1),
6050 code, NULL_TREE)))
6051 return convert (type, tem);
6052
6053 }
6054 else
6055 {
6056 /* Maybe fold x * 0 to 0. The expressions aren't the same
6057 when x is NaN, since x * 0 is also NaN. Nor are they the
6058 same in modes with signed zeros, since multiplying a
6059 negative value by 0 gives -0, not +0. */
6060 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
6061 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
6062 && real_zerop (arg1))
6063 return omit_one_operand (type, arg1, arg0);
6064 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
6065 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6066 && real_onep (arg1))
6067 return non_lvalue (convert (type, arg0));
6068
6069 /* Transform x * -1.0 into -x. */
6070 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6071 && real_minus_onep (arg1))
6072 return fold (build1 (NEGATE_EXPR, type, arg0));
6073
6074 /* Convert (C1/X)*C2 into (C1*C2)/X. */
6075 if (flag_unsafe_math_optimizations
6076 && TREE_CODE (arg0) == RDIV_EXPR
6077 && TREE_CODE (arg1) == REAL_CST
6078 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
6079 {
6080 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
6081 arg1, 0);
6082 if (tem)
6083 return fold (build (RDIV_EXPR, type, tem,
6084 TREE_OPERAND (arg0, 1)));
6085 }
6086
6087 if (flag_unsafe_math_optimizations)
6088 {
6089 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6090 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6091
6092 /* Optimizations of sqrt(...)*sqrt(...). */
6093 if ((fcode0 == BUILT_IN_SQRT && fcode1 == BUILT_IN_SQRT)
6094 || (fcode0 == BUILT_IN_SQRTF && fcode1 == BUILT_IN_SQRTF)
6095 || (fcode0 == BUILT_IN_SQRTL && fcode1 == BUILT_IN_SQRTL))
6096 {
6097 tree sqrtfn, arg, arglist;
6098 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6099 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6100
6101 /* Optimize sqrt(x)*sqrt(x) as x. */
6102 if (operand_equal_p (arg00, arg10, 0)
6103 && ! HONOR_SNANS (TYPE_MODE (type)))
6104 return arg00;
6105
6106 /* Optimize sqrt(x)*sqrt(y) as sqrt(x*y). */
6107 sqrtfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6108 arg = fold (build (MULT_EXPR, type, arg00, arg10));
6109 arglist = build_tree_list (NULL_TREE, arg);
6110 return build_function_call_expr (sqrtfn, arglist);
6111 }
6112
6113 /* Optimize expN(x)*expN(y) as expN(x+y). */
6114 if (fcode0 == fcode1
6115 && (fcode0 == BUILT_IN_EXP
6116 || fcode0 == BUILT_IN_EXPF
6117 || fcode0 == BUILT_IN_EXPL
6118 || fcode0 == BUILT_IN_EXP2
6119 || fcode0 == BUILT_IN_EXP2F
6120 || fcode0 == BUILT_IN_EXP2L
6121 || fcode0 == BUILT_IN_EXP10
6122 || fcode0 == BUILT_IN_EXP10F
6123 || fcode0 == BUILT_IN_EXP10L
6124 || fcode0 == BUILT_IN_POW10
6125 || fcode0 == BUILT_IN_POW10F
6126 || fcode0 == BUILT_IN_POW10L))
6127 {
6128 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6129 tree arg = build (PLUS_EXPR, type,
6130 TREE_VALUE (TREE_OPERAND (arg0, 1)),
6131 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6132 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6133 return build_function_call_expr (expfn, arglist);
6134 }
6135
6136 /* Optimizations of pow(...)*pow(...). */
6137 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
6138 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
6139 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
6140 {
6141 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6142 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6143 1)));
6144 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6145 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6146 1)));
6147
6148 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
6149 if (operand_equal_p (arg01, arg11, 0))
6150 {
6151 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6152 tree arg = build (MULT_EXPR, type, arg00, arg10);
6153 tree arglist = tree_cons (NULL_TREE, fold (arg),
6154 build_tree_list (NULL_TREE,
6155 arg01));
6156 return build_function_call_expr (powfn, arglist);
6157 }
6158
6159 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
6160 if (operand_equal_p (arg00, arg10, 0))
6161 {
6162 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6163 tree arg = fold (build (PLUS_EXPR, type, arg01, arg11));
6164 tree arglist = tree_cons (NULL_TREE, arg00,
6165 build_tree_list (NULL_TREE,
6166 arg));
6167 return build_function_call_expr (powfn, arglist);
6168 }
6169 }
6170
6171 /* Optimize tan(x)*cos(x) as sin(x). */
6172 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
6173 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
6174 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
6175 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
6176 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
6177 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
6178 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6179 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6180 {
6181 tree sinfn;
6182
6183 switch (fcode0)
6184 {
6185 case BUILT_IN_TAN:
6186 case BUILT_IN_COS:
6187 sinfn = implicit_built_in_decls[BUILT_IN_SIN];
6188 break;
6189 case BUILT_IN_TANF:
6190 case BUILT_IN_COSF:
6191 sinfn = implicit_built_in_decls[BUILT_IN_SINF];
6192 break;
6193 case BUILT_IN_TANL:
6194 case BUILT_IN_COSL:
6195 sinfn = implicit_built_in_decls[BUILT_IN_SINL];
6196 break;
6197 default:
6198 sinfn = NULL_TREE;
6199 }
6200
6201 if (sinfn != NULL_TREE)
6202 return build_function_call_expr (sinfn,
6203 TREE_OPERAND (arg0, 1));
6204 }
6205
6206 /* Optimize x*pow(x,c) as pow(x,c+1). */
6207 if (fcode1 == BUILT_IN_POW
6208 || fcode1 == BUILT_IN_POWF
6209 || fcode1 == BUILT_IN_POWL)
6210 {
6211 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6212 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6213 1)));
6214 if (TREE_CODE (arg11) == REAL_CST
6215 && ! TREE_CONSTANT_OVERFLOW (arg11)
6216 && operand_equal_p (arg0, arg10, 0))
6217 {
6218 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6219 REAL_VALUE_TYPE c;
6220 tree arg, arglist;
6221
6222 c = TREE_REAL_CST (arg11);
6223 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6224 arg = build_real (type, c);
6225 arglist = build_tree_list (NULL_TREE, arg);
6226 arglist = tree_cons (NULL_TREE, arg0, arglist);
6227 return build_function_call_expr (powfn, arglist);
6228 }
6229 }
6230
6231 /* Optimize pow(x,c)*x as pow(x,c+1). */
6232 if (fcode0 == BUILT_IN_POW
6233 || fcode0 == BUILT_IN_POWF
6234 || fcode0 == BUILT_IN_POWL)
6235 {
6236 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6237 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6238 1)));
6239 if (TREE_CODE (arg01) == REAL_CST
6240 && ! TREE_CONSTANT_OVERFLOW (arg01)
6241 && operand_equal_p (arg1, arg00, 0))
6242 {
6243 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6244 REAL_VALUE_TYPE c;
6245 tree arg, arglist;
6246
6247 c = TREE_REAL_CST (arg01);
6248 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6249 arg = build_real (type, c);
6250 arglist = build_tree_list (NULL_TREE, arg);
6251 arglist = tree_cons (NULL_TREE, arg1, arglist);
6252 return build_function_call_expr (powfn, arglist);
6253 }
6254 }
6255
6256 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
6257 if (! optimize_size
6258 && operand_equal_p (arg0, arg1, 0))
6259 {
6260 tree powfn;
6261
6262 if (type == double_type_node)
6263 powfn = implicit_built_in_decls[BUILT_IN_POW];
6264 else if (type == float_type_node)
6265 powfn = implicit_built_in_decls[BUILT_IN_POWF];
6266 else if (type == long_double_type_node)
6267 powfn = implicit_built_in_decls[BUILT_IN_POWL];
6268 else
6269 powfn = NULL_TREE;
6270
6271 if (powfn)
6272 {
6273 tree arg = build_real (type, dconst2);
6274 tree arglist = build_tree_list (NULL_TREE, arg);
6275 arglist = tree_cons (NULL_TREE, arg0, arglist);
6276 return build_function_call_expr (powfn, arglist);
6277 }
6278 }
6279 }
6280 }
6281 goto associate;
6282
6283 case BIT_IOR_EXPR:
6284 bit_ior:
6285 if (integer_all_onesp (arg1))
6286 return omit_one_operand (type, arg1, arg0);
6287 if (integer_zerop (arg1))
6288 return non_lvalue (convert (type, arg0));
6289 t1 = distribute_bit_expr (code, type, arg0, arg1);
6290 if (t1 != NULL_TREE)
6291 return t1;
6292
6293 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
6294
6295 This results in more efficient code for machines without a NAND
6296 instruction. Combine will canonicalize to the first form
6297 which will allow use of NAND instructions provided by the
6298 backend if they exist. */
6299 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6300 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6301 {
6302 return fold (build1 (BIT_NOT_EXPR, type,
6303 build (BIT_AND_EXPR, type,
6304 TREE_OPERAND (arg0, 0),
6305 TREE_OPERAND (arg1, 0))));
6306 }
6307
6308 /* See if this can be simplified into a rotate first. If that
6309 is unsuccessful continue in the association code. */
6310 goto bit_rotate;
6311
6312 case BIT_XOR_EXPR:
6313 if (integer_zerop (arg1))
6314 return non_lvalue (convert (type, arg0));
6315 if (integer_all_onesp (arg1))
6316 return fold (build1 (BIT_NOT_EXPR, type, arg0));
6317
6318 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
6319 with a constant, and the two constants have no bits in common,
6320 we should treat this as a BIT_IOR_EXPR since this may produce more
6321 simplifications. */
6322 if (TREE_CODE (arg0) == BIT_AND_EXPR
6323 && TREE_CODE (arg1) == BIT_AND_EXPR
6324 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6325 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6326 && integer_zerop (const_binop (BIT_AND_EXPR,
6327 TREE_OPERAND (arg0, 1),
6328 TREE_OPERAND (arg1, 1), 0)))
6329 {
6330 code = BIT_IOR_EXPR;
6331 goto bit_ior;
6332 }
6333
6334 /* See if this can be simplified into a rotate first. If that
6335 is unsuccessful continue in the association code. */
6336 goto bit_rotate;
6337
6338 case BIT_AND_EXPR:
6339 if (integer_all_onesp (arg1))
6340 return non_lvalue (convert (type, arg0));
6341 if (integer_zerop (arg1))
6342 return omit_one_operand (type, arg1, arg0);
6343 t1 = distribute_bit_expr (code, type, arg0, arg1);
6344 if (t1 != NULL_TREE)
6345 return t1;
6346 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
6347 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
6348 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6349 {
6350 unsigned int prec
6351 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
6352
6353 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
6354 && (~TREE_INT_CST_LOW (arg1)
6355 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
6356 return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
6357 }
6358
6359 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
6360
6361 This results in more efficient code for machines without a NOR
6362 instruction. Combine will canonicalize to the first form
6363 which will allow use of NOR instructions provided by the
6364 backend if they exist. */
6365 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6366 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6367 {
6368 return fold (build1 (BIT_NOT_EXPR, type,
6369 build (BIT_IOR_EXPR, type,
6370 TREE_OPERAND (arg0, 0),
6371 TREE_OPERAND (arg1, 0))));
6372 }
6373
6374 goto associate;
6375
6376 case RDIV_EXPR:
6377 /* Don't touch a floating-point divide by zero unless the mode
6378 of the constant can represent infinity. */
6379 if (TREE_CODE (arg1) == REAL_CST
6380 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
6381 && real_zerop (arg1))
6382 return t;
6383
6384 /* (-A) / (-B) -> A / B */
6385 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6386 return fold (build (RDIV_EXPR, type,
6387 TREE_OPERAND (arg0, 0),
6388 negate_expr (arg1)));
6389 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6390 return fold (build (RDIV_EXPR, type,
6391 negate_expr (arg0),
6392 TREE_OPERAND (arg1, 0)));
6393
6394 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
6395 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6396 && real_onep (arg1))
6397 return non_lvalue (convert (type, arg0));
6398
6399 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
6400 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6401 && real_minus_onep (arg1))
6402 return non_lvalue (convert (type, negate_expr (arg0)));
6403
6404 /* If ARG1 is a constant, we can convert this to a multiply by the
6405 reciprocal. This does not have the same rounding properties,
6406 so only do this if -funsafe-math-optimizations. We can actually
6407 always safely do it if ARG1 is a power of two, but it's hard to
6408 tell if it is or not in a portable manner. */
6409 if (TREE_CODE (arg1) == REAL_CST)
6410 {
6411 if (flag_unsafe_math_optimizations
6412 && 0 != (tem = const_binop (code, build_real (type, dconst1),
6413 arg1, 0)))
6414 return fold (build (MULT_EXPR, type, arg0, tem));
6415 /* Find the reciprocal if optimizing and the result is exact. */
6416 if (optimize)
6417 {
6418 REAL_VALUE_TYPE r;
6419 r = TREE_REAL_CST (arg1);
6420 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
6421 {
6422 tem = build_real (type, r);
6423 return fold (build (MULT_EXPR, type, arg0, tem));
6424 }
6425 }
6426 }
6427 /* Convert A/B/C to A/(B*C). */
6428 if (flag_unsafe_math_optimizations
6429 && TREE_CODE (arg0) == RDIV_EXPR)
6430 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
6431 fold (build (MULT_EXPR, type,
6432 TREE_OPERAND (arg0, 1), arg1))));
6433
6434 /* Convert A/(B/C) to (A/B)*C. */
6435 if (flag_unsafe_math_optimizations
6436 && TREE_CODE (arg1) == RDIV_EXPR)
6437 return fold (build (MULT_EXPR, type,
6438 fold (build (RDIV_EXPR, type, arg0,
6439 TREE_OPERAND (arg1, 0))),
6440 TREE_OPERAND (arg1, 1)));
6441
6442 /* Convert C1/(X*C2) into (C1/C2)/X. */
6443 if (flag_unsafe_math_optimizations
6444 && TREE_CODE (arg1) == MULT_EXPR
6445 && TREE_CODE (arg0) == REAL_CST
6446 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
6447 {
6448 tree tem = const_binop (RDIV_EXPR, arg0,
6449 TREE_OPERAND (arg1, 1), 0);
6450 if (tem)
6451 return fold (build (RDIV_EXPR, type, tem,
6452 TREE_OPERAND (arg1, 0)));
6453 }
6454
6455 if (flag_unsafe_math_optimizations)
6456 {
6457 enum built_in_function fcode = builtin_mathfn_code (arg1);
6458 /* Optimize x/expN(y) into x*expN(-y). */
6459 if (fcode == BUILT_IN_EXP
6460 || fcode == BUILT_IN_EXPF
6461 || fcode == BUILT_IN_EXPL
6462 || fcode == BUILT_IN_EXP2
6463 || fcode == BUILT_IN_EXP2F
6464 || fcode == BUILT_IN_EXP2L
6465 || fcode == BUILT_IN_EXP10
6466 || fcode == BUILT_IN_EXP10F
6467 || fcode == BUILT_IN_EXP10L
6468 || fcode == BUILT_IN_POW10
6469 || fcode == BUILT_IN_POW10F
6470 || fcode == BUILT_IN_POW10L)
6471 {
6472 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6473 tree arg = build1 (NEGATE_EXPR, type,
6474 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6475 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6476 arg1 = build_function_call_expr (expfn, arglist);
6477 return fold (build (MULT_EXPR, type, arg0, arg1));
6478 }
6479
6480 /* Optimize x/pow(y,z) into x*pow(y,-z). */
6481 if (fcode == BUILT_IN_POW
6482 || fcode == BUILT_IN_POWF
6483 || fcode == BUILT_IN_POWL)
6484 {
6485 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6486 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6487 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
6488 tree neg11 = fold (build1 (NEGATE_EXPR, type, arg11));
6489 tree arglist = tree_cons(NULL_TREE, arg10,
6490 build_tree_list (NULL_TREE, neg11));
6491 arg1 = build_function_call_expr (powfn, arglist);
6492 return fold (build (MULT_EXPR, type, arg0, arg1));
6493 }
6494 }
6495
6496 if (flag_unsafe_math_optimizations)
6497 {
6498 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6499 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6500
6501 /* Optimize sin(x)/cos(x) as tan(x). */
6502 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
6503 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
6504 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
6505 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6506 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6507 {
6508 tree tanfn;
6509
6510 if (fcode0 == BUILT_IN_SIN)
6511 tanfn = implicit_built_in_decls[BUILT_IN_TAN];
6512 else if (fcode0 == BUILT_IN_SINF)
6513 tanfn = implicit_built_in_decls[BUILT_IN_TANF];
6514 else if (fcode0 == BUILT_IN_SINL)
6515 tanfn = implicit_built_in_decls[BUILT_IN_TANL];
6516 else
6517 tanfn = NULL_TREE;
6518
6519 if (tanfn != NULL_TREE)
6520 return build_function_call_expr (tanfn,
6521 TREE_OPERAND (arg0, 1));
6522 }
6523
6524 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
6525 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
6526 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
6527 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
6528 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6529 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6530 {
6531 tree tanfn;
6532
6533 if (fcode0 == BUILT_IN_COS)
6534 tanfn = implicit_built_in_decls[BUILT_IN_TAN];
6535 else if (fcode0 == BUILT_IN_COSF)
6536 tanfn = implicit_built_in_decls[BUILT_IN_TANF];
6537 else if (fcode0 == BUILT_IN_COSL)
6538 tanfn = implicit_built_in_decls[BUILT_IN_TANL];
6539 else
6540 tanfn = NULL_TREE;
6541
6542 if (tanfn != NULL_TREE)
6543 {
6544 tree tmp = TREE_OPERAND (arg0, 1);
6545 tmp = build_function_call_expr (tanfn, tmp);
6546 return fold (build (RDIV_EXPR, type,
6547 build_real (type, dconst1),
6548 tmp));
6549 }
6550 }
6551
6552 /* Optimize pow(x,c)/x as pow(x,c-1). */
6553 if (fcode0 == BUILT_IN_POW
6554 || fcode0 == BUILT_IN_POWF
6555 || fcode0 == BUILT_IN_POWL)
6556 {
6557 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6558 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
6559 if (TREE_CODE (arg01) == REAL_CST
6560 && ! TREE_CONSTANT_OVERFLOW (arg01)
6561 && operand_equal_p (arg1, arg00, 0))
6562 {
6563 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6564 REAL_VALUE_TYPE c;
6565 tree arg, arglist;
6566
6567 c = TREE_REAL_CST (arg01);
6568 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
6569 arg = build_real (type, c);
6570 arglist = build_tree_list (NULL_TREE, arg);
6571 arglist = tree_cons (NULL_TREE, arg1, arglist);
6572 return build_function_call_expr (powfn, arglist);
6573 }
6574 }
6575 }
6576 goto binary;
6577
6578 case TRUNC_DIV_EXPR:
6579 case ROUND_DIV_EXPR:
6580 case FLOOR_DIV_EXPR:
6581 case CEIL_DIV_EXPR:
6582 case EXACT_DIV_EXPR:
6583 if (integer_onep (arg1))
6584 return non_lvalue (convert (type, arg0));
6585 if (integer_zerop (arg1))
6586 return t;
6587
6588 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
6589 operation, EXACT_DIV_EXPR.
6590
6591 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
6592 At one time others generated faster code, it's not clear if they do
6593 after the last round to changes to the DIV code in expmed.c. */
6594 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
6595 && multiple_of_p (type, arg0, arg1))
6596 return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));
6597
6598 if (TREE_CODE (arg1) == INTEGER_CST
6599 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6600 code, NULL_TREE)))
6601 return convert (type, tem);
6602
6603 goto binary;
6604
6605 case CEIL_MOD_EXPR:
6606 case FLOOR_MOD_EXPR:
6607 case ROUND_MOD_EXPR:
6608 case TRUNC_MOD_EXPR:
6609 if (integer_onep (arg1))
6610 return omit_one_operand (type, integer_zero_node, arg0);
6611 if (integer_zerop (arg1))
6612 return t;
6613
6614 if (TREE_CODE (arg1) == INTEGER_CST
6615 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6616 code, NULL_TREE)))
6617 return convert (type, tem);
6618
6619 goto binary;
6620
6621 case LROTATE_EXPR:
6622 case RROTATE_EXPR:
6623 if (integer_all_onesp (arg0))
6624 return omit_one_operand (type, arg0, arg1);
6625 goto shift;
6626
6627 case RSHIFT_EXPR:
6628 /* Optimize -1 >> x for arithmetic right shifts. */
6629 if (integer_all_onesp (arg0) && ! TREE_UNSIGNED (type))
6630 return omit_one_operand (type, arg0, arg1);
6631 /* ... fall through ... */
6632
6633 case LSHIFT_EXPR:
6634 shift:
6635 if (integer_zerop (arg1))
6636 return non_lvalue (convert (type, arg0));
6637 if (integer_zerop (arg0))
6638 return omit_one_operand (type, arg0, arg1);
6639
6640 /* Since negative shift count is not well-defined,
6641 don't try to compute it in the compiler. */
6642 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
6643 return t;
6644 /* Rewrite an LROTATE_EXPR by a constant into an
6645 RROTATE_EXPR by a new constant. */
6646 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
6647 {
6648 if (t == orig_t)
6649 t = copy_node (t);
6650 TREE_SET_CODE (t, RROTATE_EXPR);
6651 code = RROTATE_EXPR;
6652 TREE_OPERAND (t, 1) = arg1
6653 = const_binop
6654 (MINUS_EXPR,
6655 convert (TREE_TYPE (arg1),
6656 build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0)),
6657 arg1, 0);
6658 if (tree_int_cst_sgn (arg1) < 0)
6659 return t;
6660 }
6661
6662 /* If we have a rotate of a bit operation with the rotate count and
6663 the second operand of the bit operation both constant,
6664 permute the two operations. */
6665 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6666 && (TREE_CODE (arg0) == BIT_AND_EXPR
6667 || TREE_CODE (arg0) == BIT_IOR_EXPR
6668 || TREE_CODE (arg0) == BIT_XOR_EXPR)
6669 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
6670 return fold (build (TREE_CODE (arg0), type,
6671 fold (build (code, type,
6672 TREE_OPERAND (arg0, 0), arg1)),
6673 fold (build (code, type,
6674 TREE_OPERAND (arg0, 1), arg1))));
6675
6676 /* Two consecutive rotates adding up to the width of the mode can
6677 be ignored. */
6678 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6679 && TREE_CODE (arg0) == RROTATE_EXPR
6680 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6681 && TREE_INT_CST_HIGH (arg1) == 0
6682 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
6683 && ((TREE_INT_CST_LOW (arg1)
6684 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
6685 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
6686 return TREE_OPERAND (arg0, 0);
6687
6688 goto binary;
6689
6690 case MIN_EXPR:
6691 if (operand_equal_p (arg0, arg1, 0))
6692 return omit_one_operand (type, arg0, arg1);
6693 if (INTEGRAL_TYPE_P (type)
6694 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
6695 return omit_one_operand (type, arg1, arg0);
6696 goto associate;
6697
6698 case MAX_EXPR:
6699 if (operand_equal_p (arg0, arg1, 0))
6700 return omit_one_operand (type, arg0, arg1);
6701 if (INTEGRAL_TYPE_P (type)
6702 && TYPE_MAX_VALUE (type)
6703 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
6704 return omit_one_operand (type, arg1, arg0);
6705 goto associate;
6706
6707 case TRUTH_NOT_EXPR:
6708 /* Note that the operand of this must be an int
6709 and its values must be 0 or 1.
6710 ("true" is a fixed value perhaps depending on the language,
6711 but we don't handle values other than 1 correctly yet.) */
6712 tem = invert_truthvalue (arg0);
6713 /* Avoid infinite recursion. */
6714 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
6715 {
6716 tem = fold_single_bit_test (code, arg0, arg1, type);
6717 if (tem)
6718 return tem;
6719 return t;
6720 }
6721 return convert (type, tem);
6722
6723 case TRUTH_ANDIF_EXPR:
6724 /* Note that the operands of this must be ints
6725 and their values must be 0 or 1.
6726 ("true" is a fixed value perhaps depending on the language.) */
6727 /* If first arg is constant zero, return it. */
6728 if (integer_zerop (arg0))
6729 return convert (type, arg0);
6730 case TRUTH_AND_EXPR:
6731 /* If either arg is constant true, drop it. */
6732 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6733 return non_lvalue (convert (type, arg1));
6734 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
6735 /* Preserve sequence points. */
6736 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
6737 return non_lvalue (convert (type, arg0));
6738 /* If second arg is constant zero, result is zero, but first arg
6739 must be evaluated. */
6740 if (integer_zerop (arg1))
6741 return omit_one_operand (type, arg1, arg0);
6742 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
6743 case will be handled here. */
6744 if (integer_zerop (arg0))
6745 return omit_one_operand (type, arg0, arg1);
6746
6747 truth_andor:
6748 /* We only do these simplifications if we are optimizing. */
6749 if (!optimize)
6750 return t;
6751
6752 /* Check for things like (A || B) && (A || C). We can convert this
6753 to A || (B && C). Note that either operator can be any of the four
6754 truth and/or operations and the transformation will still be
6755 valid. Also note that we only care about order for the
6756 ANDIF and ORIF operators. If B contains side effects, this
6757 might change the truth-value of A. */
6758 if (TREE_CODE (arg0) == TREE_CODE (arg1)
6759 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
6760 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
6761 || TREE_CODE (arg0) == TRUTH_AND_EXPR
6762 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
6763 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
6764 {
6765 tree a00 = TREE_OPERAND (arg0, 0);
6766 tree a01 = TREE_OPERAND (arg0, 1);
6767 tree a10 = TREE_OPERAND (arg1, 0);
6768 tree a11 = TREE_OPERAND (arg1, 1);
6769 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
6770 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
6771 && (code == TRUTH_AND_EXPR
6772 || code == TRUTH_OR_EXPR));
6773
6774 if (operand_equal_p (a00, a10, 0))
6775 return fold (build (TREE_CODE (arg0), type, a00,
6776 fold (build (code, type, a01, a11))));
6777 else if (commutative && operand_equal_p (a00, a11, 0))
6778 return fold (build (TREE_CODE (arg0), type, a00,
6779 fold (build (code, type, a01, a10))));
6780 else if (commutative && operand_equal_p (a01, a10, 0))
6781 return fold (build (TREE_CODE (arg0), type, a01,
6782 fold (build (code, type, a00, a11))));
6783
6784 /* This case if tricky because we must either have commutative
6785 operators or else A10 must not have side-effects. */
6786
6787 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
6788 && operand_equal_p (a01, a11, 0))
6789 return fold (build (TREE_CODE (arg0), type,
6790 fold (build (code, type, a00, a10)),
6791 a01));
6792 }
6793
6794 /* See if we can build a range comparison. */
6795 if (0 != (tem = fold_range_test (t)))
6796 return tem;
6797
6798 /* Check for the possibility of merging component references. If our
6799 lhs is another similar operation, try to merge its rhs with our
6800 rhs. Then try to merge our lhs and rhs. */
6801 if (TREE_CODE (arg0) == code
6802 && 0 != (tem = fold_truthop (code, type,
6803 TREE_OPERAND (arg0, 1), arg1)))
6804 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6805
6806 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
6807 return tem;
6808
6809 return t;
6810
6811 case TRUTH_ORIF_EXPR:
6812 /* Note that the operands of this must be ints
6813 and their values must be 0 or true.
6814 ("true" is a fixed value perhaps depending on the language.) */
6815 /* If first arg is constant true, return it. */
6816 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6817 return convert (type, arg0);
6818 case TRUTH_OR_EXPR:
6819 /* If either arg is constant zero, drop it. */
6820 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
6821 return non_lvalue (convert (type, arg1));
6822 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
6823 /* Preserve sequence points. */
6824 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
6825 return non_lvalue (convert (type, arg0));
6826 /* If second arg is constant true, result is true, but we must
6827 evaluate first arg. */
6828 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
6829 return omit_one_operand (type, arg1, arg0);
6830 /* Likewise for first arg, but note this only occurs here for
6831 TRUTH_OR_EXPR. */
6832 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6833 return omit_one_operand (type, arg0, arg1);
6834 goto truth_andor;
6835
6836 case TRUTH_XOR_EXPR:
6837 /* If either arg is constant zero, drop it. */
6838 if (integer_zerop (arg0))
6839 return non_lvalue (convert (type, arg1));
6840 if (integer_zerop (arg1))
6841 return non_lvalue (convert (type, arg0));
6842 /* If either arg is constant true, this is a logical inversion. */
6843 if (integer_onep (arg0))
6844 return non_lvalue (convert (type, invert_truthvalue (arg1)));
6845 if (integer_onep (arg1))
6846 return non_lvalue (convert (type, invert_truthvalue (arg0)));
6847 return t;
6848
6849 case EQ_EXPR:
6850 case NE_EXPR:
6851 case LT_EXPR:
6852 case GT_EXPR:
6853 case LE_EXPR:
6854 case GE_EXPR:
6855 /* If one arg is a real or integer constant, put it last. */
6856 if ((TREE_CODE (arg0) == INTEGER_CST
6857 && TREE_CODE (arg1) != INTEGER_CST)
6858 || (TREE_CODE (arg0) == REAL_CST
6859 && TREE_CODE (arg0) != REAL_CST))
6860 {
6861 if (t == orig_t)
6862 t = copy_node (t);
6863 TREE_OPERAND (t, 0) = arg1;
6864 TREE_OPERAND (t, 1) = arg0;
6865 arg0 = TREE_OPERAND (t, 0);
6866 arg1 = TREE_OPERAND (t, 1);
6867 code = swap_tree_comparison (code);
6868 TREE_SET_CODE (t, code);
6869 }
6870
6871 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
6872 {
6873 tree targ0 = strip_float_extensions (arg0);
6874 tree targ1 = strip_float_extensions (arg1);
6875 tree newtype = TREE_TYPE (targ0);
6876
6877 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
6878 newtype = TREE_TYPE (targ1);
6879
6880 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
6881 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
6882 return fold (build (code, type, convert (newtype, targ0),
6883 convert (newtype, targ1)));
6884
6885 /* (-a) CMP (-b) -> b CMP a */
6886 if (TREE_CODE (arg0) == NEGATE_EXPR
6887 && TREE_CODE (arg1) == NEGATE_EXPR)
6888 return fold (build (code, type, TREE_OPERAND (arg1, 0),
6889 TREE_OPERAND (arg0, 0)));
6890
6891 if (TREE_CODE (arg1) == REAL_CST)
6892 {
6893 REAL_VALUE_TYPE cst;
6894 cst = TREE_REAL_CST (arg1);
6895
6896 /* (-a) CMP CST -> a swap(CMP) (-CST) */
6897 if (TREE_CODE (arg0) == NEGATE_EXPR)
6898 return
6899 fold (build (swap_tree_comparison (code), type,
6900 TREE_OPERAND (arg0, 0),
6901 build_real (TREE_TYPE (arg1),
6902 REAL_VALUE_NEGATE (cst))));
6903
6904 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
6905 /* a CMP (-0) -> a CMP 0 */
6906 if (REAL_VALUE_MINUS_ZERO (cst))
6907 return fold (build (code, type, arg0,
6908 build_real (TREE_TYPE (arg1), dconst0)));
6909
6910 /* x != NaN is always true, other ops are always false. */
6911 if (REAL_VALUE_ISNAN (cst)
6912 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
6913 {
6914 t = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
6915 return omit_one_operand (type, convert (type, t), arg0);
6916 }
6917
6918 /* Fold comparisons against infinity. */
6919 if (REAL_VALUE_ISINF (cst))
6920 {
6921 tem = fold_inf_compare (code, type, arg0, arg1);
6922 if (tem != NULL_TREE)
6923 return tem;
6924 }
6925 }
6926
6927 /* If this is a comparison of a real constant with a PLUS_EXPR
6928 or a MINUS_EXPR of a real constant, we can convert it into a
6929 comparison with a revised real constant as long as no overflow
6930 occurs when unsafe_math_optimizations are enabled. */
6931 if (flag_unsafe_math_optimizations
6932 && TREE_CODE (arg1) == REAL_CST
6933 && (TREE_CODE (arg0) == PLUS_EXPR
6934 || TREE_CODE (arg0) == MINUS_EXPR)
6935 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6936 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
6937 ? MINUS_EXPR : PLUS_EXPR,
6938 arg1, TREE_OPERAND (arg0, 1), 0))
6939 && ! TREE_CONSTANT_OVERFLOW (tem))
6940 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6941
6942 /* Likewise, we can simplify a comparison of a real constant with
6943 a MINUS_EXPR whose first operand is also a real constant, i.e.
6944 (c1 - x) < c2 becomes x > c1-c2. */
6945 if (flag_unsafe_math_optimizations
6946 && TREE_CODE (arg1) == REAL_CST
6947 && TREE_CODE (arg0) == MINUS_EXPR
6948 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
6949 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
6950 arg1, 0))
6951 && ! TREE_CONSTANT_OVERFLOW (tem))
6952 return fold (build (swap_tree_comparison (code), type,
6953 TREE_OPERAND (arg0, 1), tem));
6954
6955 /* Fold comparisons against built-in math functions. */
6956 if (TREE_CODE (arg1) == REAL_CST
6957 && flag_unsafe_math_optimizations
6958 && ! flag_errno_math)
6959 {
6960 enum built_in_function fcode = builtin_mathfn_code (arg0);
6961
6962 if (fcode != END_BUILTINS)
6963 {
6964 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
6965 if (tem != NULL_TREE)
6966 return tem;
6967 }
6968 }
6969 }
6970
6971 /* Convert foo++ == CONST into ++foo == CONST + INCR.
6972 First, see if one arg is constant; find the constant arg
6973 and the other one. */
6974 {
6975 tree constop = 0, varop = NULL_TREE;
6976 int constopnum = -1;
6977
6978 if (TREE_CONSTANT (arg1))
6979 constopnum = 1, constop = arg1, varop = arg0;
6980 if (TREE_CONSTANT (arg0))
6981 constopnum = 0, constop = arg0, varop = arg1;
6982
6983 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
6984 {
6985 /* This optimization is invalid for ordered comparisons
6986 if CONST+INCR overflows or if foo+incr might overflow.
6987 This optimization is invalid for floating point due to rounding.
6988 For pointer types we assume overflow doesn't happen. */
6989 if (POINTER_TYPE_P (TREE_TYPE (varop))
6990 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
6991 && (code == EQ_EXPR || code == NE_EXPR)))
6992 {
6993 tree newconst
6994 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
6995 constop, TREE_OPERAND (varop, 1)));
6996
6997 /* Do not overwrite the current varop to be a preincrement,
6998 create a new node so that we won't confuse our caller who
6999 might create trees and throw them away, reusing the
7000 arguments that they passed to build. This shows up in
7001 the THEN or ELSE parts of ?: being postincrements. */
7002 varop = build (PREINCREMENT_EXPR, TREE_TYPE (varop),
7003 TREE_OPERAND (varop, 0),
7004 TREE_OPERAND (varop, 1));
7005
7006 /* If VAROP is a reference to a bitfield, we must mask
7007 the constant by the width of the field. */
7008 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7009 && DECL_BIT_FIELD(TREE_OPERAND
7010 (TREE_OPERAND (varop, 0), 1)))
7011 {
7012 int size
7013 = TREE_INT_CST_LOW (DECL_SIZE
7014 (TREE_OPERAND
7015 (TREE_OPERAND (varop, 0), 1)));
7016 tree mask, unsigned_type;
7017 unsigned int precision;
7018 tree folded_compare;
7019
7020 /* First check whether the comparison would come out
7021 always the same. If we don't do that we would
7022 change the meaning with the masking. */
7023 if (constopnum == 0)
7024 folded_compare = fold (build (code, type, constop,
7025 TREE_OPERAND (varop, 0)));
7026 else
7027 folded_compare = fold (build (code, type,
7028 TREE_OPERAND (varop, 0),
7029 constop));
7030 if (integer_zerop (folded_compare)
7031 || integer_onep (folded_compare))
7032 return omit_one_operand (type, folded_compare, varop);
7033
7034 unsigned_type = (*lang_hooks.types.type_for_size)(size, 1);
7035 precision = TYPE_PRECISION (unsigned_type);
7036 mask = build_int_2 (~0, ~0);
7037 TREE_TYPE (mask) = unsigned_type;
7038 force_fit_type (mask, 0);
7039 mask = const_binop (RSHIFT_EXPR, mask,
7040 size_int (precision - size), 0);
7041 newconst = fold (build (BIT_AND_EXPR,
7042 TREE_TYPE (varop), newconst,
7043 convert (TREE_TYPE (varop),
7044 mask)));
7045 }
7046
7047 t = build (code, type,
7048 (constopnum == 0) ? newconst : varop,
7049 (constopnum == 1) ? newconst : varop);
7050 return t;
7051 }
7052 }
7053 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
7054 {
7055 if (POINTER_TYPE_P (TREE_TYPE (varop))
7056 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
7057 && (code == EQ_EXPR || code == NE_EXPR)))
7058 {
7059 tree newconst
7060 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
7061 constop, TREE_OPERAND (varop, 1)));
7062
7063 /* Do not overwrite the current varop to be a predecrement,
7064 create a new node so that we won't confuse our caller who
7065 might create trees and throw them away, reusing the
7066 arguments that they passed to build. This shows up in
7067 the THEN or ELSE parts of ?: being postdecrements. */
7068 varop = build (PREDECREMENT_EXPR, TREE_TYPE (varop),
7069 TREE_OPERAND (varop, 0),
7070 TREE_OPERAND (varop, 1));
7071
7072 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7073 && DECL_BIT_FIELD(TREE_OPERAND
7074 (TREE_OPERAND (varop, 0), 1)))
7075 {
7076 int size
7077 = TREE_INT_CST_LOW (DECL_SIZE
7078 (TREE_OPERAND
7079 (TREE_OPERAND (varop, 0), 1)));
7080 tree mask, unsigned_type;
7081 unsigned int precision;
7082 tree folded_compare;
7083
7084 if (constopnum == 0)
7085 folded_compare = fold (build (code, type, constop,
7086 TREE_OPERAND (varop, 0)));
7087 else
7088 folded_compare = fold (build (code, type,
7089 TREE_OPERAND (varop, 0),
7090 constop));
7091 if (integer_zerop (folded_compare)
7092 || integer_onep (folded_compare))
7093 return omit_one_operand (type, folded_compare, varop);
7094
7095 unsigned_type = (*lang_hooks.types.type_for_size)(size, 1);
7096 precision = TYPE_PRECISION (unsigned_type);
7097 mask = build_int_2 (~0, ~0);
7098 TREE_TYPE (mask) = TREE_TYPE (varop);
7099 force_fit_type (mask, 0);
7100 mask = const_binop (RSHIFT_EXPR, mask,
7101 size_int (precision - size), 0);
7102 newconst = fold (build (BIT_AND_EXPR,
7103 TREE_TYPE (varop), newconst,
7104 convert (TREE_TYPE (varop),
7105 mask)));
7106 }
7107
7108 t = build (code, type,
7109 (constopnum == 0) ? newconst : varop,
7110 (constopnum == 1) ? newconst : varop);
7111 return t;
7112 }
7113 }
7114 }
7115
7116 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
7117 This transformation affects the cases which are handled in later
7118 optimizations involving comparisons with non-negative constants. */
7119 if (TREE_CODE (arg1) == INTEGER_CST
7120 && TREE_CODE (arg0) != INTEGER_CST
7121 && tree_int_cst_sgn (arg1) > 0)
7122 {
7123 switch (code)
7124 {
7125 case GE_EXPR:
7126 code = GT_EXPR;
7127 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7128 t = build (code, type, TREE_OPERAND (t, 0), arg1);
7129 break;
7130
7131 case LT_EXPR:
7132 code = LE_EXPR;
7133 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7134 t = build (code, type, TREE_OPERAND (t, 0), arg1);
7135 break;
7136
7137 default:
7138 break;
7139 }
7140 }
7141
7142 /* Comparisons with the highest or lowest possible integer of
7143 the specified size will have known values. */
7144 {
7145 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
7146
7147 if (TREE_CODE (arg1) == INTEGER_CST
7148 && ! TREE_CONSTANT_OVERFLOW (arg1)
7149 && width <= HOST_BITS_PER_WIDE_INT
7150 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
7151 || POINTER_TYPE_P (TREE_TYPE (arg1))))
7152 {
7153 unsigned HOST_WIDE_INT signed_max;
7154 unsigned HOST_WIDE_INT max, min;
7155
7156 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
7157
7158 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7159 {
7160 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
7161 min = 0;
7162 }
7163 else
7164 {
7165 max = signed_max;
7166 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
7167 }
7168
7169 if (TREE_INT_CST_HIGH (arg1) == 0
7170 && TREE_INT_CST_LOW (arg1) == max)
7171 switch (code)
7172 {
7173 case GT_EXPR:
7174 return omit_one_operand (type,
7175 convert (type, integer_zero_node),
7176 arg0);
7177 case GE_EXPR:
7178 code = EQ_EXPR;
7179 if (t == orig_t)
7180 t = copy_node (t);
7181 TREE_SET_CODE (t, EQ_EXPR);
7182 break;
7183 case LE_EXPR:
7184 return omit_one_operand (type,
7185 convert (type, integer_one_node),
7186 arg0);
7187 case LT_EXPR:
7188 code = NE_EXPR;
7189 if (t == orig_t)
7190 t = copy_node (t);
7191 TREE_SET_CODE (t, NE_EXPR);
7192 break;
7193
7194 /* The GE_EXPR and LT_EXPR cases above are not normally
7195 reached because of previous transformations. */
7196
7197 default:
7198 break;
7199 }
7200 else if (TREE_INT_CST_HIGH (arg1) == 0
7201 && TREE_INT_CST_LOW (arg1) == max - 1)
7202 switch (code)
7203 {
7204 case GT_EXPR:
7205 code = EQ_EXPR;
7206 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7207 t = build (code, type, TREE_OPERAND (t, 0), arg1);
7208 break;
7209 case LE_EXPR:
7210 code = NE_EXPR;
7211 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7212 t = build (code, type, TREE_OPERAND (t, 0), arg1);
7213 break;
7214 default:
7215 break;
7216 }
7217 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7218 && TREE_INT_CST_LOW (arg1) == min)
7219 switch (code)
7220 {
7221 case LT_EXPR:
7222 return omit_one_operand (type,
7223 convert (type, integer_zero_node),
7224 arg0);
7225 case LE_EXPR:
7226 code = EQ_EXPR;
7227 if (t == orig_t)
7228 t = copy_node (t);
7229 TREE_SET_CODE (t, EQ_EXPR);
7230 break;
7231
7232 case GE_EXPR:
7233 return omit_one_operand (type,
7234 convert (type, integer_one_node),
7235 arg0);
7236 case GT_EXPR:
7237 code = NE_EXPR;
7238 if (t == orig_t)
7239 t = copy_node (t);
7240 TREE_SET_CODE (t, NE_EXPR);
7241 break;
7242
7243 default:
7244 break;
7245 }
7246 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7247 && TREE_INT_CST_LOW (arg1) == min + 1)
7248 switch (code)
7249 {
7250 case GE_EXPR:
7251 code = NE_EXPR;
7252 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7253 t = build (code, type, TREE_OPERAND (t, 0), arg1);
7254 break;
7255 case LT_EXPR:
7256 code = EQ_EXPR;
7257 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7258 t = build (code, type, TREE_OPERAND (t, 0), arg1);
7259 break;
7260 default:
7261 break;
7262 }
7263
7264 else if (TREE_INT_CST_HIGH (arg1) == 0
7265 && TREE_INT_CST_LOW (arg1) == signed_max
7266 && TREE_UNSIGNED (TREE_TYPE (arg1))
7267 /* signed_type does not work on pointer types. */
7268 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
7269 {
7270 /* The following case also applies to X < signed_max+1
7271 and X >= signed_max+1 because previous transformations. */
7272 if (code == LE_EXPR || code == GT_EXPR)
7273 {
7274 tree st0, st1;
7275 st0 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg0));
7276 st1 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg1));
7277 return fold
7278 (build (code == LE_EXPR ? GE_EXPR: LT_EXPR,
7279 type, convert (st0, arg0),
7280 convert (st1, integer_zero_node)));
7281 }
7282 }
7283 }
7284 }
7285
7286 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
7287 a MINUS_EXPR of a constant, we can convert it into a comparison with
7288 a revised constant as long as no overflow occurs. */
7289 if ((code == EQ_EXPR || code == NE_EXPR)
7290 && TREE_CODE (arg1) == INTEGER_CST
7291 && (TREE_CODE (arg0) == PLUS_EXPR
7292 || TREE_CODE (arg0) == MINUS_EXPR)
7293 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7294 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7295 ? MINUS_EXPR : PLUS_EXPR,
7296 arg1, TREE_OPERAND (arg0, 1), 0))
7297 && ! TREE_CONSTANT_OVERFLOW (tem))
7298 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7299
7300 /* Similarly for a NEGATE_EXPR. */
7301 else if ((code == EQ_EXPR || code == NE_EXPR)
7302 && TREE_CODE (arg0) == NEGATE_EXPR
7303 && TREE_CODE (arg1) == INTEGER_CST
7304 && 0 != (tem = negate_expr (arg1))
7305 && TREE_CODE (tem) == INTEGER_CST
7306 && ! TREE_CONSTANT_OVERFLOW (tem))
7307 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7308
7309 /* If we have X - Y == 0, we can convert that to X == Y and similarly
7310 for !=. Don't do this for ordered comparisons due to overflow. */
7311 else if ((code == NE_EXPR || code == EQ_EXPR)
7312 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
7313 return fold (build (code, type,
7314 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
7315
7316 /* If we are widening one operand of an integer comparison,
7317 see if the other operand is similarly being widened. Perhaps we
7318 can do the comparison in the narrower type. */
7319 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
7320 && TREE_CODE (arg0) == NOP_EXPR
7321 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
7322 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
7323 && (TREE_TYPE (t1) == TREE_TYPE (tem)
7324 || (TREE_CODE (t1) == INTEGER_CST
7325 && int_fits_type_p (t1, TREE_TYPE (tem)))))
7326 return fold (build (code, type, tem, convert (TREE_TYPE (tem), t1)));
7327
7328 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
7329 constant, we can simplify it. */
7330 else if (TREE_CODE (arg1) == INTEGER_CST
7331 && (TREE_CODE (arg0) == MIN_EXPR
7332 || TREE_CODE (arg0) == MAX_EXPR)
7333 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7334 return optimize_minmax_comparison (t);
7335
7336 /* If we are comparing an ABS_EXPR with a constant, we can
7337 convert all the cases into explicit comparisons, but they may
7338 well not be faster than doing the ABS and one comparison.
7339 But ABS (X) <= C is a range comparison, which becomes a subtraction
7340 and a comparison, and is probably faster. */
7341 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7342 && TREE_CODE (arg0) == ABS_EXPR
7343 && ! TREE_SIDE_EFFECTS (arg0)
7344 && (0 != (tem = negate_expr (arg1)))
7345 && TREE_CODE (tem) == INTEGER_CST
7346 && ! TREE_CONSTANT_OVERFLOW (tem))
7347 return fold (build (TRUTH_ANDIF_EXPR, type,
7348 build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
7349 build (LE_EXPR, type,
7350 TREE_OPERAND (arg0, 0), arg1)));
7351
7352 /* If this is an EQ or NE comparison with zero and ARG0 is
7353 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
7354 two operations, but the latter can be done in one less insn
7355 on machines that have only two-operand insns or on which a
7356 constant cannot be the first operand. */
7357 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
7358 && TREE_CODE (arg0) == BIT_AND_EXPR)
7359 {
7360 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
7361 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
7362 return
7363 fold (build (code, type,
7364 build (BIT_AND_EXPR, TREE_TYPE (arg0),
7365 build (RSHIFT_EXPR,
7366 TREE_TYPE (TREE_OPERAND (arg0, 0)),
7367 TREE_OPERAND (arg0, 1),
7368 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
7369 convert (TREE_TYPE (arg0),
7370 integer_one_node)),
7371 arg1));
7372 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
7373 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
7374 return
7375 fold (build (code, type,
7376 build (BIT_AND_EXPR, TREE_TYPE (arg0),
7377 build (RSHIFT_EXPR,
7378 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7379 TREE_OPERAND (arg0, 0),
7380 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
7381 convert (TREE_TYPE (arg0),
7382 integer_one_node)),
7383 arg1));
7384 }
7385
7386 /* If this is an NE or EQ comparison of zero against the result of a
7387 signed MOD operation whose second operand is a power of 2, make
7388 the MOD operation unsigned since it is simpler and equivalent. */
7389 if ((code == NE_EXPR || code == EQ_EXPR)
7390 && integer_zerop (arg1)
7391 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
7392 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
7393 || TREE_CODE (arg0) == CEIL_MOD_EXPR
7394 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
7395 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
7396 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7397 {
7398 tree newtype = (*lang_hooks.types.unsigned_type) (TREE_TYPE (arg0));
7399 tree newmod = build (TREE_CODE (arg0), newtype,
7400 convert (newtype, TREE_OPERAND (arg0, 0)),
7401 convert (newtype, TREE_OPERAND (arg0, 1)));
7402
7403 return build (code, type, newmod, convert (newtype, arg1));
7404 }
7405
7406 /* If this is an NE comparison of zero with an AND of one, remove the
7407 comparison since the AND will give the correct value. */
7408 if (code == NE_EXPR && integer_zerop (arg1)
7409 && TREE_CODE (arg0) == BIT_AND_EXPR
7410 && integer_onep (TREE_OPERAND (arg0, 1)))
7411 return convert (type, arg0);
7412
7413 /* If we have (A & C) == C where C is a power of 2, convert this into
7414 (A & C) != 0. Similarly for NE_EXPR. */
7415 if ((code == EQ_EXPR || code == NE_EXPR)
7416 && TREE_CODE (arg0) == BIT_AND_EXPR
7417 && integer_pow2p (TREE_OPERAND (arg0, 1))
7418 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
7419 return fold (build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
7420 arg0, integer_zero_node));
7421
7422 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
7423 2, then fold the expression into shifts and logical operations. */
7424 tem = fold_single_bit_test (code, arg0, arg1, type);
7425 if (tem)
7426 return tem;
7427
7428 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
7429 Similarly for NE_EXPR. */
7430 if ((code == EQ_EXPR || code == NE_EXPR)
7431 && TREE_CODE (arg0) == BIT_AND_EXPR
7432 && TREE_CODE (arg1) == INTEGER_CST
7433 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7434 {
7435 tree dandnotc
7436 = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
7437 arg1, build1 (BIT_NOT_EXPR,
7438 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7439 TREE_OPERAND (arg0, 1))));
7440 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7441 if (integer_nonzerop (dandnotc))
7442 return omit_one_operand (type, rslt, arg0);
7443 }
7444
7445 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
7446 Similarly for NE_EXPR. */
7447 if ((code == EQ_EXPR || code == NE_EXPR)
7448 && TREE_CODE (arg0) == BIT_IOR_EXPR
7449 && TREE_CODE (arg1) == INTEGER_CST
7450 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7451 {
7452 tree candnotd
7453 = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
7454 TREE_OPERAND (arg0, 1),
7455 build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1)));
7456 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7457 if (integer_nonzerop (candnotd))
7458 return omit_one_operand (type, rslt, arg0);
7459 }
7460
7461 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
7462 and similarly for >= into !=. */
7463 if ((code == LT_EXPR || code == GE_EXPR)
7464 && TREE_UNSIGNED (TREE_TYPE (arg0))
7465 && TREE_CODE (arg1) == LSHIFT_EXPR
7466 && integer_onep (TREE_OPERAND (arg1, 0)))
7467 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7468 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7469 TREE_OPERAND (arg1, 1)),
7470 convert (TREE_TYPE (arg0), integer_zero_node));
7471
7472 else if ((code == LT_EXPR || code == GE_EXPR)
7473 && TREE_UNSIGNED (TREE_TYPE (arg0))
7474 && (TREE_CODE (arg1) == NOP_EXPR
7475 || TREE_CODE (arg1) == CONVERT_EXPR)
7476 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
7477 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
7478 return
7479 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7480 convert (TREE_TYPE (arg0),
7481 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7482 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
7483 convert (TREE_TYPE (arg0), integer_zero_node));
7484
7485 /* Simplify comparison of something with itself. (For IEEE
7486 floating-point, we can only do some of these simplifications.) */
7487 if (operand_equal_p (arg0, arg1, 0))
7488 {
7489 switch (code)
7490 {
7491 case EQ_EXPR:
7492 case GE_EXPR:
7493 case LE_EXPR:
7494 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7495 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7496 return constant_boolean_node (1, type);
7497 code = EQ_EXPR;
7498 if (t == orig_t)
7499 t = copy_node (t);
7500 TREE_SET_CODE (t, code);
7501 break;
7502
7503 case NE_EXPR:
7504 /* For NE, we can only do this simplification if integer
7505 or we don't honor IEEE floating point NaNs. */
7506 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
7507 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7508 break;
7509 /* ... fall through ... */
7510 case GT_EXPR:
7511 case LT_EXPR:
7512 return constant_boolean_node (0, type);
7513 default:
7514 abort ();
7515 }
7516 }
7517
7518 /* If we are comparing an expression that just has comparisons
7519 of two integer values, arithmetic expressions of those comparisons,
7520 and constants, we can simplify it. There are only three cases
7521 to check: the two values can either be equal, the first can be
7522 greater, or the second can be greater. Fold the expression for
7523 those three values. Since each value must be 0 or 1, we have
7524 eight possibilities, each of which corresponds to the constant 0
7525 or 1 or one of the six possible comparisons.
7526
7527 This handles common cases like (a > b) == 0 but also handles
7528 expressions like ((x > y) - (y > x)) > 0, which supposedly
7529 occur in macroized code. */
7530
7531 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
7532 {
7533 tree cval1 = 0, cval2 = 0;
7534 int save_p = 0;
7535
7536 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
7537 /* Don't handle degenerate cases here; they should already
7538 have been handled anyway. */
7539 && cval1 != 0 && cval2 != 0
7540 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
7541 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
7542 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
7543 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
7544 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
7545 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
7546 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
7547 {
7548 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
7549 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
7550
7551 /* We can't just pass T to eval_subst in case cval1 or cval2
7552 was the same as ARG1. */
7553
7554 tree high_result
7555 = fold (build (code, type,
7556 eval_subst (arg0, cval1, maxval, cval2, minval),
7557 arg1));
7558 tree equal_result
7559 = fold (build (code, type,
7560 eval_subst (arg0, cval1, maxval, cval2, maxval),
7561 arg1));
7562 tree low_result
7563 = fold (build (code, type,
7564 eval_subst (arg0, cval1, minval, cval2, maxval),
7565 arg1));
7566
7567 /* All three of these results should be 0 or 1. Confirm they
7568 are. Then use those values to select the proper code
7569 to use. */
7570
7571 if ((integer_zerop (high_result)
7572 || integer_onep (high_result))
7573 && (integer_zerop (equal_result)
7574 || integer_onep (equal_result))
7575 && (integer_zerop (low_result)
7576 || integer_onep (low_result)))
7577 {
7578 /* Make a 3-bit mask with the high-order bit being the
7579 value for `>', the next for '=', and the low for '<'. */
7580 switch ((integer_onep (high_result) * 4)
7581 + (integer_onep (equal_result) * 2)
7582 + integer_onep (low_result))
7583 {
7584 case 0:
7585 /* Always false. */
7586 return omit_one_operand (type, integer_zero_node, arg0);
7587 case 1:
7588 code = LT_EXPR;
7589 break;
7590 case 2:
7591 code = EQ_EXPR;
7592 break;
7593 case 3:
7594 code = LE_EXPR;
7595 break;
7596 case 4:
7597 code = GT_EXPR;
7598 break;
7599 case 5:
7600 code = NE_EXPR;
7601 break;
7602 case 6:
7603 code = GE_EXPR;
7604 break;
7605 case 7:
7606 /* Always true. */
7607 return omit_one_operand (type, integer_one_node, arg0);
7608 }
7609
7610 t = build (code, type, cval1, cval2);
7611 if (save_p)
7612 return save_expr (t);
7613 else
7614 return fold (t);
7615 }
7616 }
7617 }
7618
7619 /* If this is a comparison of a field, we may be able to simplify it. */
7620 if (((TREE_CODE (arg0) == COMPONENT_REF
7621 && (*lang_hooks.can_use_bit_fields_p) ())
7622 || TREE_CODE (arg0) == BIT_FIELD_REF)
7623 && (code == EQ_EXPR || code == NE_EXPR)
7624 /* Handle the constant case even without -O
7625 to make sure the warnings are given. */
7626 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
7627 {
7628 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
7629 return t1 ? t1 : t;
7630 }
7631
7632 /* If this is a comparison of complex values and either or both sides
7633 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
7634 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
7635 This may prevent needless evaluations. */
7636 if ((code == EQ_EXPR || code == NE_EXPR)
7637 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
7638 && (TREE_CODE (arg0) == COMPLEX_EXPR
7639 || TREE_CODE (arg1) == COMPLEX_EXPR
7640 || TREE_CODE (arg0) == COMPLEX_CST
7641 || TREE_CODE (arg1) == COMPLEX_CST))
7642 {
7643 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
7644 tree real0, imag0, real1, imag1;
7645
7646 arg0 = save_expr (arg0);
7647 arg1 = save_expr (arg1);
7648 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
7649 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
7650 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
7651 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
7652
7653 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
7654 : TRUTH_ORIF_EXPR),
7655 type,
7656 fold (build (code, type, real0, real1)),
7657 fold (build (code, type, imag0, imag1))));
7658 }
7659
7660 /* Optimize comparisons of strlen vs zero to a compare of the
7661 first character of the string vs zero. To wit,
7662 strlen(ptr) == 0 => *ptr == 0
7663 strlen(ptr) != 0 => *ptr != 0
7664 Other cases should reduce to one of these two (or a constant)
7665 due to the return value of strlen being unsigned. */
7666 if ((code == EQ_EXPR || code == NE_EXPR)
7667 && integer_zerop (arg1)
7668 && TREE_CODE (arg0) == CALL_EXPR)
7669 {
7670 tree fndecl = get_callee_fndecl (arg0);
7671 tree arglist;
7672
7673 if (fndecl
7674 && DECL_BUILT_IN (fndecl)
7675 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
7676 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
7677 && (arglist = TREE_OPERAND (arg0, 1))
7678 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
7679 && ! TREE_CHAIN (arglist))
7680 return fold (build (code, type,
7681 build1 (INDIRECT_REF, char_type_node,
7682 TREE_VALUE(arglist)),
7683 integer_zero_node));
7684 }
7685
7686 /* From here on, the only cases we handle are when the result is
7687 known to be a constant.
7688
7689 To compute GT, swap the arguments and do LT.
7690 To compute GE, do LT and invert the result.
7691 To compute LE, swap the arguments, do LT and invert the result.
7692 To compute NE, do EQ and invert the result.
7693
7694 Therefore, the code below must handle only EQ and LT. */
7695
7696 if (code == LE_EXPR || code == GT_EXPR)
7697 {
7698 tem = arg0, arg0 = arg1, arg1 = tem;
7699 code = swap_tree_comparison (code);
7700 }
7701
7702 /* Note that it is safe to invert for real values here because we
7703 will check below in the one case that it matters. */
7704
7705 t1 = NULL_TREE;
7706 invert = 0;
7707 if (code == NE_EXPR || code == GE_EXPR)
7708 {
7709 invert = 1;
7710 code = invert_tree_comparison (code);
7711 }
7712
7713 /* Compute a result for LT or EQ if args permit;
7714 otherwise return T. */
7715 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
7716 {
7717 if (code == EQ_EXPR)
7718 t1 = build_int_2 (tree_int_cst_equal (arg0, arg1), 0);
7719 else
7720 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
7721 ? INT_CST_LT_UNSIGNED (arg0, arg1)
7722 : INT_CST_LT (arg0, arg1)),
7723 0);
7724 }
7725
7726 #if 0 /* This is no longer useful, but breaks some real code. */
7727 /* Assume a nonexplicit constant cannot equal an explicit one,
7728 since such code would be undefined anyway.
7729 Exception: on sysvr4, using #pragma weak,
7730 a label can come out as 0. */
7731 else if (TREE_CODE (arg1) == INTEGER_CST
7732 && !integer_zerop (arg1)
7733 && TREE_CONSTANT (arg0)
7734 && TREE_CODE (arg0) == ADDR_EXPR
7735 && code == EQ_EXPR)
7736 t1 = build_int_2 (0, 0);
7737 #endif
7738 /* Two real constants can be compared explicitly. */
7739 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
7740 {
7741 /* If either operand is a NaN, the result is false with two
7742 exceptions: First, an NE_EXPR is true on NaNs, but that case
7743 is already handled correctly since we will be inverting the
7744 result for NE_EXPR. Second, if we had inverted a LE_EXPR
7745 or a GE_EXPR into a LT_EXPR, we must return true so that it
7746 will be inverted into false. */
7747
7748 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
7749 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
7750 t1 = build_int_2 (invert && code == LT_EXPR, 0);
7751
7752 else if (code == EQ_EXPR)
7753 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
7754 TREE_REAL_CST (arg1)),
7755 0);
7756 else
7757 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
7758 TREE_REAL_CST (arg1)),
7759 0);
7760 }
7761
7762 if (t1 == NULL_TREE)
7763 return t;
7764
7765 if (invert)
7766 TREE_INT_CST_LOW (t1) ^= 1;
7767
7768 TREE_TYPE (t1) = type;
7769 if (TREE_CODE (type) == BOOLEAN_TYPE)
7770 return (*lang_hooks.truthvalue_conversion) (t1);
7771 return t1;
7772
7773 case COND_EXPR:
7774 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
7775 so all simple results must be passed through pedantic_non_lvalue. */
7776 if (TREE_CODE (arg0) == INTEGER_CST)
7777 return pedantic_non_lvalue
7778 (TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
7779 else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
7780 return pedantic_omit_one_operand (type, arg1, arg0);
7781
7782 /* If the second operand is zero, invert the comparison and swap
7783 the second and third operands. Likewise if the second operand
7784 is constant and the third is not or if the third operand is
7785 equivalent to the first operand of the comparison. */
7786
7787 if (integer_zerop (arg1)
7788 || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
7789 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
7790 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
7791 TREE_OPERAND (t, 2),
7792 TREE_OPERAND (arg0, 1))))
7793 {
7794 /* See if this can be inverted. If it can't, possibly because
7795 it was a floating-point inequality comparison, don't do
7796 anything. */
7797 tem = invert_truthvalue (arg0);
7798
7799 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
7800 {
7801 t = build (code, type, tem,
7802 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
7803 arg0 = tem;
7804 /* arg1 should be the first argument of the new T. */
7805 arg1 = TREE_OPERAND (t, 1);
7806 STRIP_NOPS (arg1);
7807 }
7808 }
7809
7810 /* If we have A op B ? A : C, we may be able to convert this to a
7811 simpler expression, depending on the operation and the values
7812 of B and C. Signed zeros prevent all of these transformations,
7813 for reasons given above each one. */
7814
7815 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
7816 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
7817 arg1, TREE_OPERAND (arg0, 1))
7818 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
7819 {
7820 tree arg2 = TREE_OPERAND (t, 2);
7821 enum tree_code comp_code = TREE_CODE (arg0);
7822
7823 STRIP_NOPS (arg2);
7824
7825 /* If we have A op 0 ? A : -A, consider applying the following
7826 transformations:
7827
7828 A == 0? A : -A same as -A
7829 A != 0? A : -A same as A
7830 A >= 0? A : -A same as abs (A)
7831 A > 0? A : -A same as abs (A)
7832 A <= 0? A : -A same as -abs (A)
7833 A < 0? A : -A same as -abs (A)
7834
7835 None of these transformations work for modes with signed
7836 zeros. If A is +/-0, the first two transformations will
7837 change the sign of the result (from +0 to -0, or vice
7838 versa). The last four will fix the sign of the result,
7839 even though the original expressions could be positive or
7840 negative, depending on the sign of A.
7841
7842 Note that all these transformations are correct if A is
7843 NaN, since the two alternatives (A and -A) are also NaNs. */
7844 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
7845 ? real_zerop (TREE_OPERAND (arg0, 1))
7846 : integer_zerop (TREE_OPERAND (arg0, 1)))
7847 && TREE_CODE (arg2) == NEGATE_EXPR
7848 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
7849 switch (comp_code)
7850 {
7851 case EQ_EXPR:
7852 return
7853 pedantic_non_lvalue
7854 (convert (type,
7855 negate_expr
7856 (convert (TREE_TYPE (TREE_OPERAND (t, 1)),
7857 arg1))));
7858 case NE_EXPR:
7859 return pedantic_non_lvalue (convert (type, arg1));
7860 case GE_EXPR:
7861 case GT_EXPR:
7862 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7863 arg1 = convert ((*lang_hooks.types.signed_type)
7864 (TREE_TYPE (arg1)), arg1);
7865 return pedantic_non_lvalue
7866 (convert (type, fold (build1 (ABS_EXPR,
7867 TREE_TYPE (arg1), arg1))));
7868 case LE_EXPR:
7869 case LT_EXPR:
7870 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7871 arg1 = convert ((lang_hooks.types.signed_type)
7872 (TREE_TYPE (arg1)), arg1);
7873 return pedantic_non_lvalue
7874 (negate_expr (convert (type,
7875 fold (build1 (ABS_EXPR,
7876 TREE_TYPE (arg1),
7877 arg1)))));
7878 default:
7879 abort ();
7880 }
7881
7882 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
7883 A == 0 ? A : 0 is always 0 unless A is -0. Note that
7884 both transformations are correct when A is NaN: A != 0
7885 is then true, and A == 0 is false. */
7886
7887 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
7888 {
7889 if (comp_code == NE_EXPR)
7890 return pedantic_non_lvalue (convert (type, arg1));
7891 else if (comp_code == EQ_EXPR)
7892 return pedantic_non_lvalue (convert (type, integer_zero_node));
7893 }
7894
7895 /* Try some transformations of A op B ? A : B.
7896
7897 A == B? A : B same as B
7898 A != B? A : B same as A
7899 A >= B? A : B same as max (A, B)
7900 A > B? A : B same as max (B, A)
7901 A <= B? A : B same as min (A, B)
7902 A < B? A : B same as min (B, A)
7903
7904 As above, these transformations don't work in the presence
7905 of signed zeros. For example, if A and B are zeros of
7906 opposite sign, the first two transformations will change
7907 the sign of the result. In the last four, the original
7908 expressions give different results for (A=+0, B=-0) and
7909 (A=-0, B=+0), but the transformed expressions do not.
7910
7911 The first two transformations are correct if either A or B
7912 is a NaN. In the first transformation, the condition will
7913 be false, and B will indeed be chosen. In the case of the
7914 second transformation, the condition A != B will be true,
7915 and A will be chosen.
7916
7917 The conversions to max() and min() are not correct if B is
7918 a number and A is not. The conditions in the original
7919 expressions will be false, so all four give B. The min()
7920 and max() versions would give a NaN instead. */
7921 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
7922 arg2, TREE_OPERAND (arg0, 0)))
7923 {
7924 tree comp_op0 = TREE_OPERAND (arg0, 0);
7925 tree comp_op1 = TREE_OPERAND (arg0, 1);
7926 tree comp_type = TREE_TYPE (comp_op0);
7927
7928 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
7929 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
7930 {
7931 comp_type = type;
7932 comp_op0 = arg1;
7933 comp_op1 = arg2;
7934 }
7935
7936 switch (comp_code)
7937 {
7938 case EQ_EXPR:
7939 return pedantic_non_lvalue (convert (type, arg2));
7940 case NE_EXPR:
7941 return pedantic_non_lvalue (convert (type, arg1));
7942 case LE_EXPR:
7943 case LT_EXPR:
7944 /* In C++ a ?: expression can be an lvalue, so put the
7945 operand which will be used if they are equal first
7946 so that we can convert this back to the
7947 corresponding COND_EXPR. */
7948 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
7949 return pedantic_non_lvalue
7950 (convert (type, fold (build (MIN_EXPR, comp_type,
7951 (comp_code == LE_EXPR
7952 ? comp_op0 : comp_op1),
7953 (comp_code == LE_EXPR
7954 ? comp_op1 : comp_op0)))));
7955 break;
7956 case GE_EXPR:
7957 case GT_EXPR:
7958 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
7959 return pedantic_non_lvalue
7960 (convert (type, fold (build (MAX_EXPR, comp_type,
7961 (comp_code == GE_EXPR
7962 ? comp_op0 : comp_op1),
7963 (comp_code == GE_EXPR
7964 ? comp_op1 : comp_op0)))));
7965 break;
7966 default:
7967 abort ();
7968 }
7969 }
7970
7971 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
7972 we might still be able to simplify this. For example,
7973 if C1 is one less or one more than C2, this might have started
7974 out as a MIN or MAX and been transformed by this function.
7975 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
7976
7977 if (INTEGRAL_TYPE_P (type)
7978 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7979 && TREE_CODE (arg2) == INTEGER_CST)
7980 switch (comp_code)
7981 {
7982 case EQ_EXPR:
7983 /* We can replace A with C1 in this case. */
7984 arg1 = convert (type, TREE_OPERAND (arg0, 1));
7985 t = build (code, type, TREE_OPERAND (t, 0), arg1,
7986 TREE_OPERAND (t, 2));
7987 break;
7988
7989 case LT_EXPR:
7990 /* If C1 is C2 + 1, this is min(A, C2). */
7991 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
7992 && operand_equal_p (TREE_OPERAND (arg0, 1),
7993 const_binop (PLUS_EXPR, arg2,
7994 integer_one_node, 0), 1))
7995 return pedantic_non_lvalue
7996 (fold (build (MIN_EXPR, type, arg1, arg2)));
7997 break;
7998
7999 case LE_EXPR:
8000 /* If C1 is C2 - 1, this is min(A, C2). */
8001 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
8002 && operand_equal_p (TREE_OPERAND (arg0, 1),
8003 const_binop (MINUS_EXPR, arg2,
8004 integer_one_node, 0), 1))
8005 return pedantic_non_lvalue
8006 (fold (build (MIN_EXPR, type, arg1, arg2)));
8007 break;
8008
8009 case GT_EXPR:
8010 /* If C1 is C2 - 1, this is max(A, C2). */
8011 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
8012 && operand_equal_p (TREE_OPERAND (arg0, 1),
8013 const_binop (MINUS_EXPR, arg2,
8014 integer_one_node, 0), 1))
8015 return pedantic_non_lvalue
8016 (fold (build (MAX_EXPR, type, arg1, arg2)));
8017 break;
8018
8019 case GE_EXPR:
8020 /* If C1 is C2 + 1, this is max(A, C2). */
8021 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
8022 && operand_equal_p (TREE_OPERAND (arg0, 1),
8023 const_binop (PLUS_EXPR, arg2,
8024 integer_one_node, 0), 1))
8025 return pedantic_non_lvalue
8026 (fold (build (MAX_EXPR, type, arg1, arg2)));
8027 break;
8028 case NE_EXPR:
8029 break;
8030 default:
8031 abort ();
8032 }
8033 }
8034
8035 /* If the second operand is simpler than the third, swap them
8036 since that produces better jump optimization results. */
8037 if ((TREE_CONSTANT (arg1) || DECL_P (arg1)
8038 || TREE_CODE (arg1) == SAVE_EXPR)
8039 && ! (TREE_CONSTANT (TREE_OPERAND (t, 2))
8040 || DECL_P (TREE_OPERAND (t, 2))
8041 || TREE_CODE (TREE_OPERAND (t, 2)) == SAVE_EXPR))
8042 {
8043 /* See if this can be inverted. If it can't, possibly because
8044 it was a floating-point inequality comparison, don't do
8045 anything. */
8046 tem = invert_truthvalue (arg0);
8047
8048 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8049 {
8050 t = build (code, type, tem,
8051 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
8052 arg0 = tem;
8053 /* arg1 should be the first argument of the new T. */
8054 arg1 = TREE_OPERAND (t, 1);
8055 STRIP_NOPS (arg1);
8056 }
8057 }
8058
8059 /* Convert A ? 1 : 0 to simply A. */
8060 if (integer_onep (TREE_OPERAND (t, 1))
8061 && integer_zerop (TREE_OPERAND (t, 2))
8062 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
8063 call to fold will try to move the conversion inside
8064 a COND, which will recurse. In that case, the COND_EXPR
8065 is probably the best choice, so leave it alone. */
8066 && type == TREE_TYPE (arg0))
8067 return pedantic_non_lvalue (arg0);
8068
8069 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
8070 over COND_EXPR in cases such as floating point comparisons. */
8071 if (integer_zerop (TREE_OPERAND (t, 1))
8072 && integer_onep (TREE_OPERAND (t, 2))
8073 && truth_value_p (TREE_CODE (arg0)))
8074 return pedantic_non_lvalue (convert (type,
8075 invert_truthvalue (arg0)));
8076
8077 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
8078 operation is simply A & 2. */
8079
8080 if (integer_zerop (TREE_OPERAND (t, 2))
8081 && TREE_CODE (arg0) == NE_EXPR
8082 && integer_zerop (TREE_OPERAND (arg0, 1))
8083 && integer_pow2p (arg1)
8084 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
8085 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
8086 arg1, 1))
8087 return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));
8088
8089 /* Convert A ? B : 0 into A && B if A and B are truth values. */
8090 if (integer_zerop (TREE_OPERAND (t, 2))
8091 && truth_value_p (TREE_CODE (arg0))
8092 && truth_value_p (TREE_CODE (arg1)))
8093 return pedantic_non_lvalue (fold (build (TRUTH_ANDIF_EXPR, type,
8094 arg0, arg1)));
8095
8096 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
8097 if (integer_onep (TREE_OPERAND (t, 2))
8098 && truth_value_p (TREE_CODE (arg0))
8099 && truth_value_p (TREE_CODE (arg1)))
8100 {
8101 /* Only perform transformation if ARG0 is easily inverted. */
8102 tem = invert_truthvalue (arg0);
8103 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8104 return pedantic_non_lvalue (fold (build (TRUTH_ORIF_EXPR, type,
8105 tem, arg1)));
8106 }
8107
8108 return t;
8109
8110 case COMPOUND_EXPR:
8111 /* When pedantic, a compound expression can be neither an lvalue
8112 nor an integer constant expression. */
8113 if (TREE_SIDE_EFFECTS (arg0) || pedantic)
8114 return t;
8115 /* Don't let (0, 0) be null pointer constant. */
8116 if (integer_zerop (arg1))
8117 return build1 (NOP_EXPR, type, arg1);
8118 return convert (type, arg1);
8119
8120 case COMPLEX_EXPR:
8121 if (wins)
8122 return build_complex (type, arg0, arg1);
8123 return t;
8124
8125 case REALPART_EXPR:
8126 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8127 return t;
8128 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8129 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8130 TREE_OPERAND (arg0, 1));
8131 else if (TREE_CODE (arg0) == COMPLEX_CST)
8132 return TREE_REALPART (arg0);
8133 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8134 return fold (build (TREE_CODE (arg0), type,
8135 fold (build1 (REALPART_EXPR, type,
8136 TREE_OPERAND (arg0, 0))),
8137 fold (build1 (REALPART_EXPR,
8138 type, TREE_OPERAND (arg0, 1)))));
8139 return t;
8140
8141 case IMAGPART_EXPR:
8142 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8143 return convert (type, integer_zero_node);
8144 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8145 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8146 TREE_OPERAND (arg0, 0));
8147 else if (TREE_CODE (arg0) == COMPLEX_CST)
8148 return TREE_IMAGPART (arg0);
8149 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8150 return fold (build (TREE_CODE (arg0), type,
8151 fold (build1 (IMAGPART_EXPR, type,
8152 TREE_OPERAND (arg0, 0))),
8153 fold (build1 (IMAGPART_EXPR, type,
8154 TREE_OPERAND (arg0, 1)))));
8155 return t;
8156
8157 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
8158 appropriate. */
8159 case CLEANUP_POINT_EXPR:
8160 if (! has_cleanups (arg0))
8161 return TREE_OPERAND (t, 0);
8162
8163 {
8164 enum tree_code code0 = TREE_CODE (arg0);
8165 int kind0 = TREE_CODE_CLASS (code0);
8166 tree arg00 = TREE_OPERAND (arg0, 0);
8167 tree arg01;
8168
8169 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
8170 return fold (build1 (code0, type,
8171 fold (build1 (CLEANUP_POINT_EXPR,
8172 TREE_TYPE (arg00), arg00))));
8173
8174 if (kind0 == '<' || kind0 == '2'
8175 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
8176 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
8177 || code0 == TRUTH_XOR_EXPR)
8178 {
8179 arg01 = TREE_OPERAND (arg0, 1);
8180
8181 if (TREE_CONSTANT (arg00)
8182 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
8183 && ! has_cleanups (arg00)))
8184 return fold (build (code0, type, arg00,
8185 fold (build1 (CLEANUP_POINT_EXPR,
8186 TREE_TYPE (arg01), arg01))));
8187
8188 if (TREE_CONSTANT (arg01))
8189 return fold (build (code0, type,
8190 fold (build1 (CLEANUP_POINT_EXPR,
8191 TREE_TYPE (arg00), arg00)),
8192 arg01));
8193 }
8194
8195 return t;
8196 }
8197
8198 case CALL_EXPR:
8199 /* Check for a built-in function. */
8200 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
8201 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (expr, 0), 0))
8202 == FUNCTION_DECL)
8203 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
8204 {
8205 tree tmp = fold_builtin (expr);
8206 if (tmp)
8207 return tmp;
8208 }
8209 return t;
8210
8211 default:
8212 return t;
8213 } /* switch (code) */
8214 }
8215
8216 #ifdef ENABLE_FOLD_CHECKING
8217 #undef fold
8218
8219 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
8220 static void fold_check_failed (tree, tree);
8221 void print_fold_checksum (tree);
8222
8223 /* When --enable-checking=fold, compute a digest of expr before
8224    and after the actual fold call, to verify that fold did not
8225    accidentally change the original expr.  */
8226
8227 tree
8228 fold (tree expr)
8229 {
8230 tree ret;
8231 struct md5_ctx ctx;
8232 unsigned char checksum_before[16], checksum_after[16];
8233 htab_t ht;
8234
8235 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8236 md5_init_ctx (&ctx);
8237 fold_checksum_tree (expr, &ctx, ht);
8238 md5_finish_ctx (&ctx, checksum_before);
8239 htab_empty (ht);
8240
8241 ret = fold_1 (expr);
8242
8243 md5_init_ctx (&ctx);
8244 fold_checksum_tree (expr, &ctx, ht);
8245 md5_finish_ctx (&ctx, checksum_after);
8246 htab_delete (ht);
8247
8248 if (memcmp (checksum_before, checksum_after, 16))
8249 fold_check_failed (expr, ret);
8250
8251 return ret;
8252 }
8253
8254 void
8255 print_fold_checksum (tree expr)
8256 {
8257 struct md5_ctx ctx;
8258 unsigned char checksum[16], cnt;
8259 htab_t ht;
8260
8261 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8262 md5_init_ctx (&ctx);
8263 fold_checksum_tree (expr, &ctx, ht);
8264 md5_finish_ctx (&ctx, checksum);
8265 htab_delete (ht);
8266 for (cnt = 0; cnt < 16; ++cnt)
8267 fprintf (stderr, "%02x", checksum[cnt]);
8268 putc ('\n', stderr);
8269 }
8270
/* Diagnostic routine invoked by the checking version of fold when the
   before/after checksums of the original tree differ.  The arguments
   are unused except for inspection from a debugger.  */

static void
fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}
8276
/* Recursively fold the bytes of tree EXPR into md5 context CTX, using
   hash table HT to avoid checksumming a shared subtree more than once.
   Fields that fold is permitted to modify (e.g. DECL_ASSEMBLER_NAME)
   are masked out by checksumming a scrubbed local copy instead of the
   node itself.  */

static void
fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
{
  void **slot;
  enum tree_code code;
  char buf[sizeof (struct tree_decl)];
  int i, len;

  /* BUF must be able to hold a copy of any node scrubbed below;
     tree_decl is assumed to be the largest of those nodes.  */
  if (sizeof (struct tree_exp) + 5 * sizeof (tree)
      > sizeof (struct tree_decl)
      || sizeof (struct tree_type) > sizeof (struct tree_decl))
    abort ();
  if (expr == NULL)
    return;
  /* Process each distinct node only once.  */
  slot = htab_find_slot (ht, expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = expr;
  code = TREE_CODE (expr);
  if (code == SAVE_EXPR && SAVE_EXPR_NOPLACEHOLDER (expr))
    {
      /* Allow SAVE_EXPR_NOPLACEHOLDER flag to be modified.  */
      memcpy (buf, expr, tree_size (expr));
      expr = (tree) buf;
      SAVE_EXPR_NOPLACEHOLDER (expr) = 0;
    }
  else if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy (buf, expr, tree_size (expr));
      expr = (tree) buf;
      SET_DECL_ASSEMBLER_NAME (expr, NULL);
    }
  else if (TREE_CODE_CLASS (code) == 't'
	   && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
    {
      /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified.  */
      memcpy (buf, expr, tree_size (expr));
      expr = (tree) buf;
      TYPE_POINTER_TO (expr) = NULL;
      TYPE_REFERENCE_TO (expr) = NULL;
    }
  /* Hash the raw bytes of the node, then recurse into the trees it
     references.  */
  md5_process_bytes (expr, tree_size (expr), ctx);
  fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  len = TREE_CODE_LENGTH (code);
  switch (TREE_CODE_CLASS (code))
    {
    case 'c':
      switch (code)
	{
	case STRING_CST:
	  md5_process_bytes (TREE_STRING_POINTER (expr),
			     TREE_STRING_LENGTH (expr), ctx);
	  break;
	case COMPLEX_CST:
	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
	  break;
	case VECTOR_CST:
	  fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case 'x':
      switch (code)
	{
	case TREE_LIST:
	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
	  break;
	case TREE_VEC:
	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case 'e':
      /* Some expression codes carry a different number of operands
	 than TREE_CODE_LENGTH reports; adjust LEN for those.  */
      switch (code)
	{
	case SAVE_EXPR: len = 2; break;
	case GOTO_SUBROUTINE_EXPR: len = 0; break;
	case RTL_EXPR: len = 0; break;
	case WITH_CLEANUP_EXPR: len = 2; break;
	default: break;
	}
      /* FALLTHROUGH */
    case 'r':
    case '<':
    case '1':
    case '2':
    case 's':
      for (i = 0; i < len; ++i)
	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case 'd':
      fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
      fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
      fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
      fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
      fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
      fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
      break;
    case 't':
      fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
      fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}
8406
8407 #endif
8408
8409 /* Perform constant folding and related simplification of initializer
8410 expression EXPR. This behaves identically to "fold" but ignores
8411 potential run-time traps and exceptions that fold must preserve. */
8412
8413 tree
8414 fold_initializer (tree expr)
8415 {
8416 int saved_signaling_nans = flag_signaling_nans;
8417 int saved_trapping_math = flag_trapping_math;
8418 int saved_trapv = flag_trapv;
8419 tree result;
8420
8421 flag_signaling_nans = 0;
8422 flag_trapping_math = 0;
8423 flag_trapv = 0;
8424
8425 result = fold (expr);
8426
8427 flag_signaling_nans = saved_signaling_nans;
8428 flag_trapping_math = saved_trapping_math;
8429 flag_trapv = saved_trapv;
8430
8431 return result;
8432 }
8433
8434 /* Determine if first argument is a multiple of second argument. Return 0 if
8435 it is not, or we cannot easily determine it to be.
8436
8437 An example of the sort of thing we care about (at this point; this routine
8438 could surely be made more general, and expanded to do what the *_DIV_EXPR's
8439 fold cases do now) is discovering that
8440
8441 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8442
8443 is a multiple of
8444
8445 SAVE_EXPR (J * 8)
8446
8447 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
8448
8449 This code also handles discovering that
8450
8451 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8452
8453 is a multiple of 8 so we don't have to worry about dealing with a
8454 possible remainder.
8455
8456 Note that we *look* inside a SAVE_EXPR only to determine how it was
8457 calculated; it is not safe for fold to do much of anything else with the
8458 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
8459 at run time. For example, the latter example above *cannot* be implemented
8460 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
8461 evaluation time of the original SAVE_EXPR is not necessarily the same at
8462 the time the new expression is evaluated. The only optimization of this
8463 sort that would be valid is changing
8464
8465 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
8466
8467 divided by 8 to
8468
8469 SAVE_EXPR (I) * SAVE_EXPR (J)
8470
8471 (where the same SAVE_EXPR (J) is used in the original and the
8472 transformed version). */
8473
static int
multiple_of_p (tree type, tree top, tree bottom)
{
  /* Any expression is trivially a multiple of itself.  */
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case MULT_EXPR:
      /* A product is a multiple of BOTTOM if either factor is.  */
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* A sum or difference is a multiple only if both operands are.  */
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      /* X << N is a multiple of BOTTOM iff (1 << N) is, provided the
	 shift count is a constant small enough not to overflow.  */
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
	{
	  tree op1, t1;

	  op1 = TREE_OPERAND (top, 1);
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
	      > TREE_INT_CST_LOW (op1)
	      && TREE_INT_CST_HIGH (op1) == 0
	      && 0 != (t1 = convert (type,
				     const_binop (LSHIFT_EXPR, size_one_node,
						  op1, 0)))
	      && ! TREE_OVERFLOW (t1))
	    return multiple_of_p (type, t1, bottom);
	}
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
	  || (TYPE_PRECISION (type)
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
	return 0;

      /* ... fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case INTEGER_CST:
      /* Two constants: compute TOP % BOTTOM directly, but punt on a
	 negative operand in an unsigned type, where the modulus would
	 be computed on the wrapped-around value.  */
      if (TREE_CODE (bottom) != INTEGER_CST
	  || (TREE_UNSIGNED (type)
	      && (tree_int_cst_sgn (top) < 0
		  || tree_int_cst_sgn (bottom) < 0)))
	return 0;
      return integer_zerop (const_binop (TRUNC_MOD_EXPR,
					 top, bottom, 0));

    default:
      return 0;
    }
}
8538
/* Return true if `t' is known to be non-negative.  Returning 0 only means
   "not provably non-negative", never "provably negative".  */

int
tree_expr_nonnegative_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case ABS_EXPR:
    case FFS_EXPR:
    case POPCOUNT_EXPR:
    case PARITY_EXPR:
      /* These operations cannot yield a negative value.  */
      return 1;

    case CLZ_EXPR:
    case CTZ_EXPR:
      /* These are undefined at zero.  This is true even if
	 C[LT]Z_DEFINED_VALUE_AT_ZERO is set, since what we're
	 computing here is a user-visible property.  */
      return 0;

    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case PLUS_EXPR:
      /* For floats, x + y >= 0 when both addends are; no wraparound.  */
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
	return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	  && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
	 both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
	    {
	      /* The sum needs one bit more than the wider addend, so the
		 result must be strictly wider than that to avoid touching
		 the sign bit.  */
	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
				       TYPE_PRECISION (inner2)) + 1;
	      return prec < TYPE_PRECISION (TREE_TYPE (t));
	    }
	}
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
	{
	  /* x * x for floating point x is always non-negative.  */
	  if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
	    return 1;
	  return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	    && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
	}

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
	    return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
	      < TYPE_PRECISION (TREE_TYPE (t));
	}
      return 0;

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      /* Quotient of two non-negative values is non-negative.  */
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	&& tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      /* The sign of a remainder follows the dividend.  */
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case RDIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	&& tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Conversions: decide by inner/outer type pair.  */
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
	tree outer_type = TREE_TYPE (t);

	if (TREE_CODE (outer_type) == REAL_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      {
		/* Unsigned integers convert to non-negative reals.  */
		if (TREE_UNSIGNED (inner_type))
		  return 1;
		return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	      }
	  }
	else if (TREE_CODE (outer_type) == INTEGER_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      /* A widening of an unsigned value cannot set the sign bit.  */
	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
		     && TREE_UNSIGNED (inner_type);
	  }
      }
      break;

    case COND_EXPR:
      /* Both arms must be non-negative; the condition is irrelevant.  */
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
	&& tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
    case COMPOUND_EXPR:
      /* Only the value of the second operand matters.  */
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MIN_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	&& tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MAX_EXPR:
      /* max is non-negative if either operand is.  */
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	|| tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MODIFY_EXPR:
      /* The value of an assignment is its right-hand side.  */
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case BIND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case SAVE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case NON_LVALUE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case FLOAT_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case RTL_EXPR:
      /* Delegate to the RTL-level predicate below.  */
      return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));

    case CALL_EXPR:
      /* Recognize builtins with known-non-negative or sign-preserving
	 results; unknown calls fall through to the default handling.  */
      {
	tree fndecl = get_callee_fndecl (t);
	tree arglist = TREE_OPERAND (t, 1);
	if (fndecl
	    && DECL_BUILT_IN (fndecl)
	    && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    /* These never return a negative value.  */
	    case BUILT_IN_CABS:
	    case BUILT_IN_CABSL:
	    case BUILT_IN_CABSF:
	    case BUILT_IN_EXP:
	    case BUILT_IN_EXPF:
	    case BUILT_IN_EXPL:
	    case BUILT_IN_EXP2:
	    case BUILT_IN_EXP2F:
	    case BUILT_IN_EXP2L:
	    case BUILT_IN_EXP10:
	    case BUILT_IN_EXP10F:
	    case BUILT_IN_EXP10L:
	    case BUILT_IN_POW10:
	    case BUILT_IN_POW10F:
	    case BUILT_IN_POW10L:
	    case BUILT_IN_FABS:
	    case BUILT_IN_FABSF:
	    case BUILT_IN_FABSL:
	    case BUILT_IN_SQRT:
	    case BUILT_IN_SQRTF:
	    case BUILT_IN_SQRTL:
	      return 1;

	    /* These preserve the sign of their (first) argument.  */
	    case BUILT_IN_ATAN:
	    case BUILT_IN_ATANF:
	    case BUILT_IN_ATANL:
	    case BUILT_IN_CEIL:
	    case BUILT_IN_CEILF:
	    case BUILT_IN_CEILL:
	    case BUILT_IN_FLOOR:
	    case BUILT_IN_FLOORF:
	    case BUILT_IN_FLOORL:
	    case BUILT_IN_NEARBYINT:
	    case BUILT_IN_NEARBYINTF:
	    case BUILT_IN_NEARBYINTL:
	    case BUILT_IN_ROUND:
	    case BUILT_IN_ROUNDF:
	    case BUILT_IN_ROUNDL:
	    case BUILT_IN_TRUNC:
	    case BUILT_IN_TRUNCF:
	    case BUILT_IN_TRUNCL:
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist));

	    /* pow (x, y) is non-negative when the base x is.  */
	    case BUILT_IN_POW:
	    case BUILT_IN_POWF:
	    case BUILT_IN_POWL:
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist));

	    default:
	      break;
	    }
      }

      /* ... fall through ...  */

    default:
      if (truth_value_p (TREE_CODE (t)))
	/* Truth values evaluate to 0 or 1, which is nonnegative.  */
	return 1;
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return 0;
}
8754
8755 /* Return true if `r' is known to be non-negative.
8756 Only handles constants at the moment. */
8757
8758 int
8759 rtl_expr_nonnegative_p (rtx r)
8760 {
8761 switch (GET_CODE (r))
8762 {
8763 case CONST_INT:
8764 return INTVAL (r) >= 0;
8765
8766 case CONST_DOUBLE:
8767 if (GET_MODE (r) == VOIDmode)
8768 return CONST_DOUBLE_HIGH (r) >= 0;
8769 return 0;
8770
8771 case CONST_VECTOR:
8772 {
8773 int units, i;
8774 rtx elt;
8775
8776 units = CONST_VECTOR_NUNITS (r);
8777
8778 for (i = 0; i < units; ++i)
8779 {
8780 elt = CONST_VECTOR_ELT (r, i);
8781 if (!rtl_expr_nonnegative_p (elt))
8782 return 0;
8783 }
8784
8785 return 1;
8786 }
8787
8788 case SYMBOL_REF:
8789 case LABEL_REF:
8790 /* These are always nonnegative. */
8791 return 1;
8792
8793 default:
8794 return 0;
8795 }
8796 }
8797
8798 #include "gt-fold-const.h"