tree.def (FFS_EXPR, [...]): Delete unused tree codes.
[gcc.git] / gcc / fold-const.c
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
29
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type.
32
33 fold takes a tree as argument and returns a simplified tree.
34
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
38
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
41
42 force_fit_type takes a constant and prior overflow indicator, and
43 forces the value to fit the type. It returns an overflow indicator. */
44
45 #include "config.h"
46 #include "system.h"
47 #include "coretypes.h"
48 #include "tm.h"
49 #include "flags.h"
50 #include "tree.h"
51 #include "real.h"
52 #include "rtl.h"
53 #include "expr.h"
54 #include "tm_p.h"
55 #include "toplev.h"
56 #include "ggc.h"
57 #include "hashtab.h"
58 #include "langhooks.h"
59 #include "md5.h"
60
61 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
62 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
63 static bool negate_expr_p (tree);
64 static tree negate_expr (tree);
65 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
66 static tree associate_trees (tree, tree, enum tree_code, tree);
67 static tree int_const_binop (enum tree_code, tree, tree, int);
68 static tree const_binop (enum tree_code, tree, tree, int);
69 static hashval_t size_htab_hash (const void *);
70 static int size_htab_eq (const void *, const void *);
71 static tree fold_convert (tree, tree);
72 static enum tree_code invert_tree_comparison (enum tree_code);
73 static enum tree_code swap_tree_comparison (enum tree_code);
74 static int comparison_to_compcode (enum tree_code);
75 static enum tree_code compcode_to_comparison (int);
76 static int truth_value_p (enum tree_code);
77 static int operand_equal_for_comparison_p (tree, tree, tree);
78 static int twoval_comparison_p (tree, tree *, tree *, int *);
79 static tree eval_subst (tree, tree, tree, tree, tree);
80 static tree pedantic_omit_one_operand (tree, tree, tree);
81 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
82 static tree make_bit_field_ref (tree, tree, int, int, int);
83 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
84 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
85 enum machine_mode *, int *, int *,
86 tree *, tree *);
87 static int all_ones_mask_p (tree, int);
88 static tree sign_bit_p (tree, tree);
89 static int simple_operand_p (tree);
90 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
91 static tree make_range (tree, int *, tree *, tree *);
92 static tree build_range_check (tree, tree, int, tree, tree);
93 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
94 tree);
95 static tree fold_range_test (tree);
96 static tree unextend (tree, int, int, tree);
97 static tree fold_truthop (enum tree_code, tree, tree, tree);
98 static tree optimize_minmax_comparison (tree);
99 static tree extract_muldiv (tree, tree, enum tree_code, tree);
100 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
101 static tree strip_compound_expr (tree, tree);
102 static int multiple_of_p (tree, tree, tree);
103 static tree constant_boolean_node (int, tree);
104 static int count_cond (tree, int);
105 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
106 tree, int);
107 static bool fold_real_zero_addition_p (tree, tree, int);
108 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
109 tree, tree, tree);
110 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
111 static bool tree_swap_operands_p (tree, tree);
112
113 /* The following constants represent a bit based encoding of GCC's
114 comparison operators. This encoding simplifies transformations
115 on relational comparison operators, such as AND and OR. */
116 #define COMPCODE_FALSE 0
117 #define COMPCODE_LT 1
118 #define COMPCODE_EQ 2
119 #define COMPCODE_LE 3
120 #define COMPCODE_GT 4
121 #define COMPCODE_NE 5
122 #define COMPCODE_GE 6
123 #define COMPCODE_TRUE 7
124
125 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
126 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
127 and SUM1. Then this yields nonzero if overflow occurred during the
128 addition.
129
130 Overflow occurs if A and B have the same sign, but A and SUM differ in
131 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
132 sign. */
133 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
134 \f
135 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
136 We do that by representing the two-word integer in 4 words, with only
137 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
138 number. The value of the word is LOWPART + HIGHPART * BASE. */
139
140 #define LOWPART(x) \
141 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
142 #define HIGHPART(x) \
143 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
144 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
145
146 /* Unpack a two-word integer into 4 words.
147 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
148 WORDS points to the array of HOST_WIDE_INTs. */
149
150 static void
151 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
152 {
153 words[0] = LOWPART (low);
154 words[1] = HIGHPART (low);
155 words[2] = LOWPART (hi);
156 words[3] = HIGHPART (hi);
157 }
158
159 /* Pack an array of 4 words into a two-word integer.
160 WORDS points to the array of words.
161 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
162
163 static void
164 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
165 HOST_WIDE_INT *hi)
166 {
167 *low = words[0] + words[1] * BASE;
168 *hi = words[2] + words[3] * BASE;
169 }
170 \f
171 /* Make the integer constant T valid for its type by setting to 0 or 1 all
172 the bits in the constant that don't belong in the type.
173
174 Return 1 if a signed overflow occurs, 0 otherwise. If OVERFLOW is
175 nonzero, a signed overflow has already occurred in calculating T, so
176 propagate it. */
177
178 int
179 force_fit_type (tree t, int overflow)
180 {
181 unsigned HOST_WIDE_INT low;
182 HOST_WIDE_INT high;
183 unsigned int prec;
184
185 if (TREE_CODE (t) == REAL_CST)
186 {
187 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
188 Consider doing it via real_convert now. */
189 return overflow;
190 }
191
192 else if (TREE_CODE (t) != INTEGER_CST)
193 return overflow;
194
195 low = TREE_INT_CST_LOW (t);
196 high = TREE_INT_CST_HIGH (t);
197
198 if (POINTER_TYPE_P (TREE_TYPE (t))
199 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
200 prec = POINTER_SIZE;
201 else
202 prec = TYPE_PRECISION (TREE_TYPE (t));
203
204 /* First clear all bits that are beyond the type's precision. */
205
206 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
207 ;
208 else if (prec > HOST_BITS_PER_WIDE_INT)
209 TREE_INT_CST_HIGH (t)
210 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
211 else
212 {
213 TREE_INT_CST_HIGH (t) = 0;
214 if (prec < HOST_BITS_PER_WIDE_INT)
215 TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
216 }
217
218 /* Unsigned types do not suffer sign extension or overflow unless they
219 are a sizetype. */
220 if (TREE_UNSIGNED (TREE_TYPE (t))
221 && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
222 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
223 return overflow;
224
225 /* If the value's sign bit is set, extend the sign. */
226 if (prec != 2 * HOST_BITS_PER_WIDE_INT
227 && (prec > HOST_BITS_PER_WIDE_INT
228 ? 0 != (TREE_INT_CST_HIGH (t)
229 & ((HOST_WIDE_INT) 1
230 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
231 : 0 != (TREE_INT_CST_LOW (t)
232 & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
233 {
234 /* Value is negative:
235 set to 1 all the bits that are outside this type's precision. */
236 if (prec > HOST_BITS_PER_WIDE_INT)
237 TREE_INT_CST_HIGH (t)
238 |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
239 else
240 {
241 TREE_INT_CST_HIGH (t) = -1;
242 if (prec < HOST_BITS_PER_WIDE_INT)
243 TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
244 }
245 }
246
247 /* Return nonzero if signed overflow occurred. */
248 return
249 ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
250 != 0);
251 }
252 \f
253 /* Add two doubleword integers with doubleword result.
254 Each argument is given as two `HOST_WIDE_INT' pieces.
255 One argument is L1 and H1; the other, L2 and H2.
256 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
257
258 int
259 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
260 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
261 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
262 {
263 unsigned HOST_WIDE_INT l;
264 HOST_WIDE_INT h;
265
266 l = l1 + l2;
267 h = h1 + h2 + (l < l1);
268
269 *lv = l;
270 *hv = h;
271 return OVERFLOW_SUM_SIGN (h1, h2, h);
272 }
273
274 /* Negate a doubleword integer with doubleword result.
275 Return nonzero if the operation overflows, assuming it's signed.
276 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
277 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
278
279 int
280 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
281 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
282 {
283 if (l1 == 0)
284 {
285 *lv = 0;
286 *hv = - h1;
287 return (*hv & h1) < 0;
288 }
289 else
290 {
291 *lv = -l1;
292 *hv = ~h1;
293 return 0;
294 }
295 }
296 \f
297 /* Multiply two doubleword integers with doubleword result.
298 Return nonzero if the operation overflows, assuming it's signed.
299 Each argument is given as two `HOST_WIDE_INT' pieces.
300 One argument is L1 and H1; the other, L2 and H2.
301 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
302
303 int
304 mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
305 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
306 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
307 {
308 HOST_WIDE_INT arg1[4];
309 HOST_WIDE_INT arg2[4];
310 HOST_WIDE_INT prod[4 * 2];
311 unsigned HOST_WIDE_INT carry;
312 int i, j, k;
313 unsigned HOST_WIDE_INT toplow, neglow;
314 HOST_WIDE_INT tophigh, neghigh;
315
316 encode (arg1, l1, h1);
317 encode (arg2, l2, h2);
318
319 memset (prod, 0, sizeof prod);
320
321 for (i = 0; i < 4; i++)
322 {
323 carry = 0;
324 for (j = 0; j < 4; j++)
325 {
326 k = i + j;
327 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
328 carry += arg1[i] * arg2[j];
329 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
330 carry += prod[k];
331 prod[k] = LOWPART (carry);
332 carry = HIGHPART (carry);
333 }
334 prod[i + 4] = carry;
335 }
336
337 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
338
339 /* Check for overflow by calculating the top half of the answer in full;
340 it should agree with the low half's sign bit. */
341 decode (prod + 4, &toplow, &tophigh);
342 if (h1 < 0)
343 {
344 neg_double (l2, h2, &neglow, &neghigh);
345 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
346 }
347 if (h2 < 0)
348 {
349 neg_double (l1, h1, &neglow, &neghigh);
350 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
351 }
352 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
353 }
354 \f
355 /* Shift the doubleword integer in L1, H1 left by COUNT places
356 keeping only PREC bits of result.
357 Shift right if COUNT is negative.
358 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
359 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
360
361 void
362 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
363 HOST_WIDE_INT count, unsigned int prec,
364 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
365 {
366 unsigned HOST_WIDE_INT signmask;
367
368 if (count < 0)
369 {
370 rshift_double (l1, h1, -count, prec, lv, hv, arith);
371 return;
372 }
373
374 #ifdef SHIFT_COUNT_TRUNCATED
375 if (SHIFT_COUNT_TRUNCATED)
376 count %= prec;
377 #endif
378
379 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
380 {
381 /* Shifting by the host word size is undefined according to the
382 ANSI standard, so we must handle this as a special case. */
383 *hv = 0;
384 *lv = 0;
385 }
386 else if (count >= HOST_BITS_PER_WIDE_INT)
387 {
388 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
389 *lv = 0;
390 }
391 else
392 {
393 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
394 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
395 *lv = l1 << count;
396 }
397
398 /* Sign extend all bits that are beyond the precision. */
399
400 signmask = -((prec > HOST_BITS_PER_WIDE_INT
401 ? ((unsigned HOST_WIDE_INT) *hv
402 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
403 : (*lv >> (prec - 1))) & 1);
404
405 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
406 ;
407 else if (prec >= HOST_BITS_PER_WIDE_INT)
408 {
409 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
410 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
411 }
412 else
413 {
414 *hv = signmask;
415 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
416 *lv |= signmask << prec;
417 }
418 }
419
420 /* Shift the doubleword integer in L1, H1 right by COUNT places
421 keeping only PREC bits of result. COUNT must be positive.
422 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
423 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
424
425 void
426 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
427 HOST_WIDE_INT count, unsigned int prec,
428 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
429 int arith)
430 {
431 unsigned HOST_WIDE_INT signmask;
432
433 signmask = (arith
434 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
435 : 0);
436
437 #ifdef SHIFT_COUNT_TRUNCATED
438 if (SHIFT_COUNT_TRUNCATED)
439 count %= prec;
440 #endif
441
442 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
443 {
444 /* Shifting by the host word size is undefined according to the
445 ANSI standard, so we must handle this as a special case. */
446 *hv = 0;
447 *lv = 0;
448 }
449 else if (count >= HOST_BITS_PER_WIDE_INT)
450 {
451 *hv = 0;
452 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
453 }
454 else
455 {
456 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
457 *lv = ((l1 >> count)
458 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
459 }
460
461 /* Zero / sign extend all bits that are beyond the precision. */
462
463 if (count >= (HOST_WIDE_INT)prec)
464 {
465 *hv = signmask;
466 *lv = signmask;
467 }
468 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
469 ;
470 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
471 {
472 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
473 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
474 }
475 else
476 {
477 *hv = signmask;
478 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
479 *lv |= signmask << (prec - count);
480 }
481 }
482 \f
483 /* Rotate the doubleword integer in L1, H1 left by COUNT places
484 keeping only PREC bits of result.
485 Rotate right if COUNT is negative.
486 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
487
488 void
489 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
490 HOST_WIDE_INT count, unsigned int prec,
491 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
492 {
493 unsigned HOST_WIDE_INT s1l, s2l;
494 HOST_WIDE_INT s1h, s2h;
495
496 count %= prec;
497 if (count < 0)
498 count += prec;
499
500 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
501 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
502 *lv = s1l | s2l;
503 *hv = s1h | s2h;
504 }
505
506 /* Rotate the doubleword integer in L1, H1 left by COUNT places
507 keeping only PREC bits of result. COUNT must be positive.
508 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
509
510 void
511 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
512 HOST_WIDE_INT count, unsigned int prec,
513 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
514 {
515 unsigned HOST_WIDE_INT s1l, s2l;
516 HOST_WIDE_INT s1h, s2h;
517
518 count %= prec;
519 if (count < 0)
520 count += prec;
521
522 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
523 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
524 *lv = s1l | s2l;
525 *hv = s1h | s2h;
526 }
527 \f
528 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
529 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
530 CODE is a tree code for a kind of division, one of
531 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
532 or EXACT_DIV_EXPR
533 It controls how the quotient is rounded to an integer.
534 Return nonzero if the operation overflows.
535 UNS nonzero says do unsigned division. */
536
537 int
538 div_and_round_double (enum tree_code code, int uns,
539 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
540 HOST_WIDE_INT hnum_orig,
541 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
542 HOST_WIDE_INT hden_orig,
543 unsigned HOST_WIDE_INT *lquo,
544 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
545 HOST_WIDE_INT *hrem)
546 {
547 int quo_neg = 0;
548 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
549 HOST_WIDE_INT den[4], quo[4];
550 int i, j;
551 unsigned HOST_WIDE_INT work;
552 unsigned HOST_WIDE_INT carry = 0;
553 unsigned HOST_WIDE_INT lnum = lnum_orig;
554 HOST_WIDE_INT hnum = hnum_orig;
555 unsigned HOST_WIDE_INT lden = lden_orig;
556 HOST_WIDE_INT hden = hden_orig;
557 int overflow = 0;
558
559 if (hden == 0 && lden == 0)
560 overflow = 1, lden = 1;
561
562 /* Calculate quotient sign and convert operands to unsigned. */
563 if (!uns)
564 {
565 if (hnum < 0)
566 {
567 quo_neg = ~ quo_neg;
568 /* (minimum integer) / (-1) is the only overflow case. */
569 if (neg_double (lnum, hnum, &lnum, &hnum)
570 && ((HOST_WIDE_INT) lden & hden) == -1)
571 overflow = 1;
572 }
573 if (hden < 0)
574 {
575 quo_neg = ~ quo_neg;
576 neg_double (lden, hden, &lden, &hden);
577 }
578 }
579
580 if (hnum == 0 && hden == 0)
581 { /* single precision */
582 *hquo = *hrem = 0;
583 /* This unsigned division rounds toward zero. */
584 *lquo = lnum / lden;
585 goto finish_up;
586 }
587
588 if (hnum == 0)
589 { /* trivial case: dividend < divisor */
590 /* hden != 0 already checked. */
591 *hquo = *lquo = 0;
592 *hrem = hnum;
593 *lrem = lnum;
594 goto finish_up;
595 }
596
597 memset (quo, 0, sizeof quo);
598
599 memset (num, 0, sizeof num); /* to zero 9th element */
600 memset (den, 0, sizeof den);
601
602 encode (num, lnum, hnum);
603 encode (den, lden, hden);
604
605 /* Special code for when the divisor < BASE. */
606 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
607 {
608 /* hnum != 0 already checked. */
609 for (i = 4 - 1; i >= 0; i--)
610 {
611 work = num[i] + carry * BASE;
612 quo[i] = work / lden;
613 carry = work % lden;
614 }
615 }
616 else
617 {
618 /* Full double precision division,
619 with thanks to Don Knuth's "Seminumerical Algorithms". */
620 int num_hi_sig, den_hi_sig;
621 unsigned HOST_WIDE_INT quo_est, scale;
622
623 /* Find the highest nonzero divisor digit. */
624 for (i = 4 - 1;; i--)
625 if (den[i] != 0)
626 {
627 den_hi_sig = i;
628 break;
629 }
630
631 /* Insure that the first digit of the divisor is at least BASE/2.
632 This is required by the quotient digit estimation algorithm. */
633
634 scale = BASE / (den[den_hi_sig] + 1);
635 if (scale > 1)
636 { /* scale divisor and dividend */
637 carry = 0;
638 for (i = 0; i <= 4 - 1; i++)
639 {
640 work = (num[i] * scale) + carry;
641 num[i] = LOWPART (work);
642 carry = HIGHPART (work);
643 }
644
645 num[4] = carry;
646 carry = 0;
647 for (i = 0; i <= 4 - 1; i++)
648 {
649 work = (den[i] * scale) + carry;
650 den[i] = LOWPART (work);
651 carry = HIGHPART (work);
652 if (den[i] != 0) den_hi_sig = i;
653 }
654 }
655
656 num_hi_sig = 4;
657
658 /* Main loop */
659 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
660 {
661 /* Guess the next quotient digit, quo_est, by dividing the first
662 two remaining dividend digits by the high order quotient digit.
663 quo_est is never low and is at most 2 high. */
664 unsigned HOST_WIDE_INT tmp;
665
666 num_hi_sig = i + den_hi_sig + 1;
667 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
668 if (num[num_hi_sig] != den[den_hi_sig])
669 quo_est = work / den[den_hi_sig];
670 else
671 quo_est = BASE - 1;
672
673 /* Refine quo_est so it's usually correct, and at most one high. */
674 tmp = work - quo_est * den[den_hi_sig];
675 if (tmp < BASE
676 && (den[den_hi_sig - 1] * quo_est
677 > (tmp * BASE + num[num_hi_sig - 2])))
678 quo_est--;
679
680 /* Try QUO_EST as the quotient digit, by multiplying the
681 divisor by QUO_EST and subtracting from the remaining dividend.
682 Keep in mind that QUO_EST is the I - 1st digit. */
683
684 carry = 0;
685 for (j = 0; j <= den_hi_sig; j++)
686 {
687 work = quo_est * den[j] + carry;
688 carry = HIGHPART (work);
689 work = num[i + j] - LOWPART (work);
690 num[i + j] = LOWPART (work);
691 carry += HIGHPART (work) != 0;
692 }
693
694 /* If quo_est was high by one, then num[i] went negative and
695 we need to correct things. */
696 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
697 {
698 quo_est--;
699 carry = 0; /* add divisor back in */
700 for (j = 0; j <= den_hi_sig; j++)
701 {
702 work = num[i + j] + den[j] + carry;
703 carry = HIGHPART (work);
704 num[i + j] = LOWPART (work);
705 }
706
707 num [num_hi_sig] += carry;
708 }
709
710 /* Store the quotient digit. */
711 quo[i] = quo_est;
712 }
713 }
714
715 decode (quo, lquo, hquo);
716
717 finish_up:
718 /* If result is negative, make it so. */
719 if (quo_neg)
720 neg_double (*lquo, *hquo, lquo, hquo);
721
722 /* compute trial remainder: rem = num - (quo * den) */
723 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
724 neg_double (*lrem, *hrem, lrem, hrem);
725 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
726
727 switch (code)
728 {
729 case TRUNC_DIV_EXPR:
730 case TRUNC_MOD_EXPR: /* round toward zero */
731 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
732 return overflow;
733
734 case FLOOR_DIV_EXPR:
735 case FLOOR_MOD_EXPR: /* round toward negative infinity */
736 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
737 {
738 /* quo = quo - 1; */
739 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
740 lquo, hquo);
741 }
742 else
743 return overflow;
744 break;
745
746 case CEIL_DIV_EXPR:
747 case CEIL_MOD_EXPR: /* round toward positive infinity */
748 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
749 {
750 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
751 lquo, hquo);
752 }
753 else
754 return overflow;
755 break;
756
757 case ROUND_DIV_EXPR:
758 case ROUND_MOD_EXPR: /* round to closest integer */
759 {
760 unsigned HOST_WIDE_INT labs_rem = *lrem;
761 HOST_WIDE_INT habs_rem = *hrem;
762 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
763 HOST_WIDE_INT habs_den = hden, htwice;
764
765 /* Get absolute values. */
766 if (*hrem < 0)
767 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
768 if (hden < 0)
769 neg_double (lden, hden, &labs_den, &habs_den);
770
771 /* If (2 * abs (lrem) >= abs (lden)) */
772 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
773 labs_rem, habs_rem, &ltwice, &htwice);
774
775 if (((unsigned HOST_WIDE_INT) habs_den
776 < (unsigned HOST_WIDE_INT) htwice)
777 || (((unsigned HOST_WIDE_INT) habs_den
778 == (unsigned HOST_WIDE_INT) htwice)
779 && (labs_den < ltwice)))
780 {
781 if (*hquo < 0)
782 /* quo = quo - 1; */
783 add_double (*lquo, *hquo,
784 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
785 else
786 /* quo = quo + 1; */
787 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
788 lquo, hquo);
789 }
790 else
791 return overflow;
792 }
793 break;
794
795 default:
796 abort ();
797 }
798
799 /* compute true remainder: rem = num - (quo * den) */
800 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
801 neg_double (*lrem, *hrem, lrem, hrem);
802 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
803 return overflow;
804 }
805 \f
806 /* Determine whether an expression T can be cheaply negated using
807 the function negate_expr. */
808
809 static bool
810 negate_expr_p (tree t)
811 {
812 unsigned HOST_WIDE_INT val;
813 unsigned int prec;
814 tree type;
815
816 if (t == 0)
817 return false;
818
819 type = TREE_TYPE (t);
820
821 STRIP_SIGN_NOPS (t);
822 switch (TREE_CODE (t))
823 {
824 case INTEGER_CST:
825 if (TREE_UNSIGNED (type))
826 return false;
827
828 /* Check that -CST will not overflow type. */
829 prec = TYPE_PRECISION (type);
830 if (prec > HOST_BITS_PER_WIDE_INT)
831 {
832 if (TREE_INT_CST_LOW (t) != 0)
833 return true;
834 prec -= HOST_BITS_PER_WIDE_INT;
835 val = TREE_INT_CST_HIGH (t);
836 }
837 else
838 val = TREE_INT_CST_LOW (t);
839 if (prec < HOST_BITS_PER_WIDE_INT)
840 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
841 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
842
843 case REAL_CST:
844 case NEGATE_EXPR:
845 return true;
846
847 case MINUS_EXPR:
848 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
849 return ! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations;
850
851 case MULT_EXPR:
852 if (TREE_UNSIGNED (TREE_TYPE (t)))
853 break;
854
855 /* Fall through. */
856
857 case RDIV_EXPR:
858 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
859 return negate_expr_p (TREE_OPERAND (t, 1))
860 || negate_expr_p (TREE_OPERAND (t, 0));
861 break;
862
863 default:
864 break;
865 }
866 return false;
867 }
868
869 /* Given T, an expression, return the negation of T. Allow for T to be
870 null, in which case return null. */
871
872 static tree
873 negate_expr (tree t)
874 {
875 tree type;
876 tree tem;
877
878 if (t == 0)
879 return 0;
880
881 type = TREE_TYPE (t);
882 STRIP_SIGN_NOPS (t);
883
884 switch (TREE_CODE (t))
885 {
886 case INTEGER_CST:
887 if (! TREE_UNSIGNED (type)
888 && 0 != (tem = fold (build1 (NEGATE_EXPR, type, t)))
889 && ! TREE_OVERFLOW (tem))
890 return tem;
891 break;
892
893 case REAL_CST:
894 tem = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (t)));
895 /* Two's complement FP formats, such as c4x, may overflow. */
896 if (! TREE_OVERFLOW (tem))
897 return convert (type, tem);
898 break;
899
900 case NEGATE_EXPR:
901 return convert (type, TREE_OPERAND (t, 0));
902
903 case MINUS_EXPR:
904 /* - (A - B) -> B - A */
905 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
906 return convert (type,
907 fold (build (MINUS_EXPR, TREE_TYPE (t),
908 TREE_OPERAND (t, 1),
909 TREE_OPERAND (t, 0))));
910 break;
911
912 case MULT_EXPR:
913 if (TREE_UNSIGNED (TREE_TYPE (t)))
914 break;
915
916 /* Fall through. */
917
918 case RDIV_EXPR:
919 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
920 {
921 tem = TREE_OPERAND (t, 1);
922 if (negate_expr_p (tem))
923 return convert (type,
924 fold (build (TREE_CODE (t), TREE_TYPE (t),
925 TREE_OPERAND (t, 0),
926 negate_expr (tem))));
927 tem = TREE_OPERAND (t, 0);
928 if (negate_expr_p (tem))
929 return convert (type,
930 fold (build (TREE_CODE (t), TREE_TYPE (t),
931 negate_expr (tem),
932 TREE_OPERAND (t, 1))));
933 }
934 break;
935
936 default:
937 break;
938 }
939
940 return convert (type, fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t)));
941 }
942 \f
943 /* Split a tree IN into a constant, literal and variable parts that could be
944 combined with CODE to make IN. "constant" means an expression with
945 TREE_CONSTANT but that isn't an actual constant. CODE must be a
946 commutative arithmetic operation. Store the constant part into *CONP,
947 the literal in *LITP and return the variable part. If a part isn't
948 present, set it to null. If the tree does not decompose in this way,
949 return the entire tree as the variable part and the other parts as null.
950
951 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
952 case, we negate an operand that was subtracted. Except if it is a
953 literal for which we use *MINUS_LITP instead.
954
955 If NEGATE_P is true, we are negating all of IN, again except a literal
956 for which we use *MINUS_LITP instead.
957
958 If IN is itself a literal or constant, return it as appropriate.
959
960 Note that we do not guarantee that any of the three values will be the
961 same type as IN, but they will have the same signedness and mode. */
962
963 static tree
964 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
965 tree *minus_litp, int negate_p)
966 {
967 tree var = 0;
968
969 *conp = 0;
970 *litp = 0;
971 *minus_litp = 0;
972
973 /* Strip any conversions that don't change the machine mode or signedness. */
974 STRIP_SIGN_NOPS (in);
975
976 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
977 *litp = in;
978 else if (TREE_CODE (in) == code
979 || (! FLOAT_TYPE_P (TREE_TYPE (in))
980 /* We can associate addition and subtraction together (even
981 though the C standard doesn't say so) for integers because
982 the value is not affected. For reals, the value might be
983 affected, so we can't. */
984 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
985 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
986 {
987 tree op0 = TREE_OPERAND (in, 0);
988 tree op1 = TREE_OPERAND (in, 1);
989 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
990 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
991
992 /* First see if either of the operands is a literal, then a constant. */
993 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
994 *litp = op0, op0 = 0;
995 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
996 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
997
998 if (op0 != 0 && TREE_CONSTANT (op0))
999 *conp = op0, op0 = 0;
1000 else if (op1 != 0 && TREE_CONSTANT (op1))
1001 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1002
1003 /* If we haven't dealt with either operand, this is not a case we can
1004 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1005 if (op0 != 0 && op1 != 0)
1006 var = in;
1007 else if (op0 != 0)
1008 var = op0;
1009 else
1010 var = op1, neg_var_p = neg1_p;
1011
1012 /* Now do any needed negations. */
1013 if (neg_litp_p)
1014 *minus_litp = *litp, *litp = 0;
1015 if (neg_conp_p)
1016 *conp = negate_expr (*conp);
1017 if (neg_var_p)
1018 var = negate_expr (var);
1019 }
1020 else if (TREE_CONSTANT (in))
1021 *conp = in;
1022 else
1023 var = in;
1024
1025 if (negate_p)
1026 {
1027 if (*litp)
1028 *minus_litp = *litp, *litp = 0;
1029 else if (*minus_litp)
1030 *litp = *minus_litp, *minus_litp = 0;
1031 *conp = negate_expr (*conp);
1032 var = negate_expr (var);
1033 }
1034
1035 return var;
1036 }
1037
1038 /* Re-associate trees split by the above function. T1 and T2 are either
1039 expressions to associate or null. Return the new expression, if any. If
1040 we build an operation, do it in TYPE and with CODE. */
1041
1042 static tree
1043 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1044 {
1045 if (t1 == 0)
1046 return t2;
1047 else if (t2 == 0)
1048 return t1;
1049
1050 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1051 try to fold this since we will have infinite recursion. But do
1052 deal with any NEGATE_EXPRs. */
1053 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1054 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1055 {
1056 if (code == PLUS_EXPR)
1057 {
1058 if (TREE_CODE (t1) == NEGATE_EXPR)
1059 return build (MINUS_EXPR, type, convert (type, t2),
1060 convert (type, TREE_OPERAND (t1, 0)));
1061 else if (TREE_CODE (t2) == NEGATE_EXPR)
1062 return build (MINUS_EXPR, type, convert (type, t1),
1063 convert (type, TREE_OPERAND (t2, 0)));
1064 }
1065 return build (code, type, convert (type, t1), convert (type, t2));
1066 }
1067
1068 return fold (build (code, type, convert (type, t1), convert (type, t2)));
1069 }
1070 \f
1071 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1072 to produce a new constant.
1073
1074 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1075
1076 static tree
1077 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1078 {
1079 unsigned HOST_WIDE_INT int1l, int2l;
1080 HOST_WIDE_INT int1h, int2h;
1081 unsigned HOST_WIDE_INT low;
1082 HOST_WIDE_INT hi;
1083 unsigned HOST_WIDE_INT garbagel;
1084 HOST_WIDE_INT garbageh;
1085 tree t;
1086 tree type = TREE_TYPE (arg1);
1087 int uns = TREE_UNSIGNED (type);
1088 int is_sizetype
1089 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1090 int overflow = 0;
1091 int no_overflow = 0;
1092
1093 int1l = TREE_INT_CST_LOW (arg1);
1094 int1h = TREE_INT_CST_HIGH (arg1);
1095 int2l = TREE_INT_CST_LOW (arg2);
1096 int2h = TREE_INT_CST_HIGH (arg2);
1097
1098 switch (code)
1099 {
1100 case BIT_IOR_EXPR:
1101 low = int1l | int2l, hi = int1h | int2h;
1102 break;
1103
1104 case BIT_XOR_EXPR:
1105 low = int1l ^ int2l, hi = int1h ^ int2h;
1106 break;
1107
1108 case BIT_AND_EXPR:
1109 low = int1l & int2l, hi = int1h & int2h;
1110 break;
1111
1112 case RSHIFT_EXPR:
1113 int2l = -int2l;
1114 case LSHIFT_EXPR:
1115 /* It's unclear from the C standard whether shifts can overflow.
1116 The following code ignores overflow; perhaps a C standard
1117 interpretation ruling is needed. */
1118 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1119 &low, &hi, !uns);
1120 no_overflow = 1;
1121 break;
1122
1123 case RROTATE_EXPR:
1124 int2l = - int2l;
1125 case LROTATE_EXPR:
1126 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1127 &low, &hi);
1128 break;
1129
1130 case PLUS_EXPR:
1131 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1132 break;
1133
1134 case MINUS_EXPR:
1135 neg_double (int2l, int2h, &low, &hi);
1136 add_double (int1l, int1h, low, hi, &low, &hi);
1137 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1138 break;
1139
1140 case MULT_EXPR:
1141 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1142 break;
1143
1144 case TRUNC_DIV_EXPR:
1145 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1146 case EXACT_DIV_EXPR:
1147 /* This is a shortcut for a common special case. */
1148 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1149 && ! TREE_CONSTANT_OVERFLOW (arg1)
1150 && ! TREE_CONSTANT_OVERFLOW (arg2)
1151 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1152 {
1153 if (code == CEIL_DIV_EXPR)
1154 int1l += int2l - 1;
1155
1156 low = int1l / int2l, hi = 0;
1157 break;
1158 }
1159
1160 /* ... fall through ... */
1161
1162 case ROUND_DIV_EXPR:
1163 if (int2h == 0 && int2l == 1)
1164 {
1165 low = int1l, hi = int1h;
1166 break;
1167 }
1168 if (int1l == int2l && int1h == int2h
1169 && ! (int1l == 0 && int1h == 0))
1170 {
1171 low = 1, hi = 0;
1172 break;
1173 }
1174 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1175 &low, &hi, &garbagel, &garbageh);
1176 break;
1177
1178 case TRUNC_MOD_EXPR:
1179 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1180 /* This is a shortcut for a common special case. */
1181 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1182 && ! TREE_CONSTANT_OVERFLOW (arg1)
1183 && ! TREE_CONSTANT_OVERFLOW (arg2)
1184 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1185 {
1186 if (code == CEIL_MOD_EXPR)
1187 int1l += int2l - 1;
1188 low = int1l % int2l, hi = 0;
1189 break;
1190 }
1191
1192 /* ... fall through ... */
1193
1194 case ROUND_MOD_EXPR:
1195 overflow = div_and_round_double (code, uns,
1196 int1l, int1h, int2l, int2h,
1197 &garbagel, &garbageh, &low, &hi);
1198 break;
1199
1200 case MIN_EXPR:
1201 case MAX_EXPR:
1202 if (uns)
1203 low = (((unsigned HOST_WIDE_INT) int1h
1204 < (unsigned HOST_WIDE_INT) int2h)
1205 || (((unsigned HOST_WIDE_INT) int1h
1206 == (unsigned HOST_WIDE_INT) int2h)
1207 && int1l < int2l));
1208 else
1209 low = (int1h < int2h
1210 || (int1h == int2h && int1l < int2l));
1211
1212 if (low == (code == MIN_EXPR))
1213 low = int1l, hi = int1h;
1214 else
1215 low = int2l, hi = int2h;
1216 break;
1217
1218 default:
1219 abort ();
1220 }
1221
1222 /* If this is for a sizetype, can be represented as one (signed)
1223 HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
1224 constants. */
1225 if (is_sizetype
1226 && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
1227 || (hi == -1 && (HOST_WIDE_INT) low < 0))
1228 && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
1229 return size_int_type_wide (low, type);
1230 else
1231 {
1232 t = build_int_2 (low, hi);
1233 TREE_TYPE (t) = TREE_TYPE (arg1);
1234 }
1235
1236 TREE_OVERFLOW (t)
1237 = ((notrunc
1238 ? (!uns || is_sizetype) && overflow
1239 : (force_fit_type (t, (!uns || is_sizetype) && overflow)
1240 && ! no_overflow))
1241 | TREE_OVERFLOW (arg1)
1242 | TREE_OVERFLOW (arg2));
1243
1244 /* If we're doing a size calculation, unsigned arithmetic does overflow.
1245 So check if force_fit_type truncated the value. */
1246 if (is_sizetype
1247 && ! TREE_OVERFLOW (t)
1248 && (TREE_INT_CST_HIGH (t) != hi
1249 || TREE_INT_CST_LOW (t) != low))
1250 TREE_OVERFLOW (t) = 1;
1251
1252 TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
1253 | TREE_CONSTANT_OVERFLOW (arg1)
1254 | TREE_CONSTANT_OVERFLOW (arg2));
1255 return t;
1256 }
1257
1258 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1259 constant. We assume ARG1 and ARG2 have the same data type, or at least
1260 are the same kind of constant and the same machine mode.
1261
1262 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1263
1264 static tree
1265 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1266 {
1267 STRIP_NOPS (arg1);
1268 STRIP_NOPS (arg2);
1269
1270 if (TREE_CODE (arg1) == INTEGER_CST)
1271 return int_const_binop (code, arg1, arg2, notrunc);
1272
1273 if (TREE_CODE (arg1) == REAL_CST)
1274 {
1275 enum machine_mode mode;
1276 REAL_VALUE_TYPE d1;
1277 REAL_VALUE_TYPE d2;
1278 REAL_VALUE_TYPE value;
1279 tree t, type;
1280
1281 d1 = TREE_REAL_CST (arg1);
1282 d2 = TREE_REAL_CST (arg2);
1283
1284 type = TREE_TYPE (arg1);
1285 mode = TYPE_MODE (type);
1286
1287 /* Don't perform operation if we honor signaling NaNs and
1288 either operand is a NaN. */
1289 if (HONOR_SNANS (mode)
1290 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1291 return NULL_TREE;
1292
1293 /* Don't perform operation if it would raise a division
1294 by zero exception. */
1295 if (code == RDIV_EXPR
1296 && REAL_VALUES_EQUAL (d2, dconst0)
1297 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1298 return NULL_TREE;
1299
1300 /* If either operand is a NaN, just return it. Otherwise, set up
1301 for floating-point trap; we return an overflow. */
1302 if (REAL_VALUE_ISNAN (d1))
1303 return arg1;
1304 else if (REAL_VALUE_ISNAN (d2))
1305 return arg2;
1306
1307 REAL_ARITHMETIC (value, code, d1, d2);
1308
1309 t = build_real (type, real_value_truncate (mode, value));
1310
1311 TREE_OVERFLOW (t)
1312 = (force_fit_type (t, 0)
1313 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1314 TREE_CONSTANT_OVERFLOW (t)
1315 = TREE_OVERFLOW (t)
1316 | TREE_CONSTANT_OVERFLOW (arg1)
1317 | TREE_CONSTANT_OVERFLOW (arg2);
1318 return t;
1319 }
1320 if (TREE_CODE (arg1) == COMPLEX_CST)
1321 {
1322 tree type = TREE_TYPE (arg1);
1323 tree r1 = TREE_REALPART (arg1);
1324 tree i1 = TREE_IMAGPART (arg1);
1325 tree r2 = TREE_REALPART (arg2);
1326 tree i2 = TREE_IMAGPART (arg2);
1327 tree t;
1328
1329 switch (code)
1330 {
1331 case PLUS_EXPR:
1332 t = build_complex (type,
1333 const_binop (PLUS_EXPR, r1, r2, notrunc),
1334 const_binop (PLUS_EXPR, i1, i2, notrunc));
1335 break;
1336
1337 case MINUS_EXPR:
1338 t = build_complex (type,
1339 const_binop (MINUS_EXPR, r1, r2, notrunc),
1340 const_binop (MINUS_EXPR, i1, i2, notrunc));
1341 break;
1342
1343 case MULT_EXPR:
1344 t = build_complex (type,
1345 const_binop (MINUS_EXPR,
1346 const_binop (MULT_EXPR,
1347 r1, r2, notrunc),
1348 const_binop (MULT_EXPR,
1349 i1, i2, notrunc),
1350 notrunc),
1351 const_binop (PLUS_EXPR,
1352 const_binop (MULT_EXPR,
1353 r1, i2, notrunc),
1354 const_binop (MULT_EXPR,
1355 i1, r2, notrunc),
1356 notrunc));
1357 break;
1358
1359 case RDIV_EXPR:
1360 {
1361 tree magsquared
1362 = const_binop (PLUS_EXPR,
1363 const_binop (MULT_EXPR, r2, r2, notrunc),
1364 const_binop (MULT_EXPR, i2, i2, notrunc),
1365 notrunc);
1366
1367 t = build_complex (type,
1368 const_binop
1369 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1370 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1371 const_binop (PLUS_EXPR,
1372 const_binop (MULT_EXPR, r1, r2,
1373 notrunc),
1374 const_binop (MULT_EXPR, i1, i2,
1375 notrunc),
1376 notrunc),
1377 magsquared, notrunc),
1378 const_binop
1379 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1380 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1381 const_binop (MINUS_EXPR,
1382 const_binop (MULT_EXPR, i1, r2,
1383 notrunc),
1384 const_binop (MULT_EXPR, r1, i2,
1385 notrunc),
1386 notrunc),
1387 magsquared, notrunc));
1388 }
1389 break;
1390
1391 default:
1392 abort ();
1393 }
1394 return t;
1395 }
1396 return 0;
1397 }
1398
1399 /* These are the hash table functions for the hash table of INTEGER_CST
1400 nodes of a sizetype. */
1401
1402 /* Return the hash code code X, an INTEGER_CST. */
1403
1404 static hashval_t
1405 size_htab_hash (const void *x)
1406 {
1407 tree t = (tree) x;
1408
1409 return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
1410 ^ htab_hash_pointer (TREE_TYPE (t))
1411 ^ (TREE_OVERFLOW (t) << 20));
1412 }
1413
1414 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1415 is the same as that given by *Y, which is the same. */
1416
1417 static int
1418 size_htab_eq (const void *x, const void *y)
1419 {
1420 tree xt = (tree) x;
1421 tree yt = (tree) y;
1422
1423 return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
1424 && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
1425 && TREE_TYPE (xt) == TREE_TYPE (yt)
1426 && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
1427 }
1428 \f
1429 /* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
1430 bits are given by NUMBER and of the sizetype represented by KIND. */
1431
1432 tree
1433 size_int_wide (HOST_WIDE_INT number, enum size_type_kind kind)
1434 {
1435 return size_int_type_wide (number, sizetype_tab[(int) kind]);
1436 }
1437
1438 /* Likewise, but the desired type is specified explicitly. */
1439
1440 static GTY (()) tree new_const;
1441 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
1442 htab_t size_htab;
1443
1444 tree
1445 size_int_type_wide (HOST_WIDE_INT number, tree type)
1446 {
1447 void **slot;
1448
1449 if (size_htab == 0)
1450 {
1451 size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
1452 new_const = make_node (INTEGER_CST);
1453 }
1454
1455 /* Adjust NEW_CONST to be the constant we want. If it's already in the
1456 hash table, we return the value from the hash table. Otherwise, we
1457 place that in the hash table and make a new node for the next time. */
1458 TREE_INT_CST_LOW (new_const) = number;
1459 TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
1460 TREE_TYPE (new_const) = type;
1461 TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
1462 = force_fit_type (new_const, 0);
1463
1464 slot = htab_find_slot (size_htab, new_const, INSERT);
1465 if (*slot == 0)
1466 {
1467 tree t = new_const;
1468
1469 *slot = new_const;
1470 new_const = make_node (INTEGER_CST);
1471 return t;
1472 }
1473 else
1474 return (tree) *slot;
1475 }
1476
1477 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1478 is a tree code. The type of the result is taken from the operands.
1479 Both must be the same type integer type and it must be a size type.
1480 If the operands are constant, so is the result. */
1481
1482 tree
1483 size_binop (enum tree_code code, tree arg0, tree arg1)
1484 {
1485 tree type = TREE_TYPE (arg0);
1486
1487 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1488 || type != TREE_TYPE (arg1))
1489 abort ();
1490
1491 /* Handle the special case of two integer constants faster. */
1492 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1493 {
1494 /* And some specific cases even faster than that. */
1495 if (code == PLUS_EXPR && integer_zerop (arg0))
1496 return arg1;
1497 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1498 && integer_zerop (arg1))
1499 return arg0;
1500 else if (code == MULT_EXPR && integer_onep (arg0))
1501 return arg1;
1502
1503 /* Handle general case of two integer constants. */
1504 return int_const_binop (code, arg0, arg1, 0);
1505 }
1506
1507 if (arg0 == error_mark_node || arg1 == error_mark_node)
1508 return error_mark_node;
1509
1510 return fold (build (code, type, arg0, arg1));
1511 }
1512
1513 /* Given two values, either both of sizetype or both of bitsizetype,
1514 compute the difference between the two values. Return the value
1515 in signed type corresponding to the type of the operands. */
1516
1517 tree
1518 size_diffop (tree arg0, tree arg1)
1519 {
1520 tree type = TREE_TYPE (arg0);
1521 tree ctype;
1522
1523 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1524 || type != TREE_TYPE (arg1))
1525 abort ();
1526
1527 /* If the type is already signed, just do the simple thing. */
1528 if (! TREE_UNSIGNED (type))
1529 return size_binop (MINUS_EXPR, arg0, arg1);
1530
1531 ctype = (type == bitsizetype || type == ubitsizetype
1532 ? sbitsizetype : ssizetype);
1533
1534 /* If either operand is not a constant, do the conversions to the signed
1535 type and subtract. The hardware will do the right thing with any
1536 overflow in the subtraction. */
1537 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1538 return size_binop (MINUS_EXPR, convert (ctype, arg0),
1539 convert (ctype, arg1));
1540
1541 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1542 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1543 overflow) and negate (which can't either). Special-case a result
1544 of zero while we're here. */
1545 if (tree_int_cst_equal (arg0, arg1))
1546 return convert (ctype, integer_zero_node);
1547 else if (tree_int_cst_lt (arg1, arg0))
1548 return convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1549 else
1550 return size_binop (MINUS_EXPR, convert (ctype, integer_zero_node),
1551 convert (ctype, size_binop (MINUS_EXPR, arg1, arg0)));
1552 }
1553 \f
1554
1555 /* Given T, a tree representing type conversion of ARG1, a constant,
1556 return a constant tree representing the result of conversion. */
1557
1558 static tree
1559 fold_convert (tree t, tree arg1)
1560 {
1561 tree type = TREE_TYPE (t);
1562 int overflow = 0;
1563
1564 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1565 {
1566 if (TREE_CODE (arg1) == INTEGER_CST)
1567 {
1568 /* If we would build a constant wider than GCC supports,
1569 leave the conversion unfolded. */
1570 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
1571 return t;
1572
1573 /* If we are trying to make a sizetype for a small integer, use
1574 size_int to pick up cached types to reduce duplicate nodes. */
1575 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1576 && !TREE_CONSTANT_OVERFLOW (arg1)
1577 && compare_tree_int (arg1, 10000) < 0)
1578 return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);
1579
1580 /* Given an integer constant, make new constant with new type,
1581 appropriately sign-extended or truncated. */
1582 t = build_int_2 (TREE_INT_CST_LOW (arg1),
1583 TREE_INT_CST_HIGH (arg1));
1584 TREE_TYPE (t) = type;
1585 /* Indicate an overflow if (1) ARG1 already overflowed,
1586 or (2) force_fit_type indicates an overflow.
1587 Tell force_fit_type that an overflow has already occurred
1588 if ARG1 is a too-large unsigned value and T is signed.
1589 But don't indicate an overflow if converting a pointer. */
1590 TREE_OVERFLOW (t)
1591 = ((force_fit_type (t,
1592 (TREE_INT_CST_HIGH (arg1) < 0
1593 && (TREE_UNSIGNED (type)
1594 < TREE_UNSIGNED (TREE_TYPE (arg1)))))
1595 && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
1596 || TREE_OVERFLOW (arg1));
1597 TREE_CONSTANT_OVERFLOW (t)
1598 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1599 }
1600 else if (TREE_CODE (arg1) == REAL_CST)
1601 {
1602 /* Don't initialize these, use assignments.
1603 Initialized local aggregates don't work on old compilers. */
1604 REAL_VALUE_TYPE x;
1605 REAL_VALUE_TYPE l;
1606 REAL_VALUE_TYPE u;
1607 tree type1 = TREE_TYPE (arg1);
1608 int no_upper_bound;
1609
1610 x = TREE_REAL_CST (arg1);
1611 l = real_value_from_int_cst (type1, TYPE_MIN_VALUE (type));
1612
1613 no_upper_bound = (TYPE_MAX_VALUE (type) == NULL);
1614 if (!no_upper_bound)
1615 u = real_value_from_int_cst (type1, TYPE_MAX_VALUE (type));
1616
1617 /* See if X will be in range after truncation towards 0.
1618 To compensate for truncation, move the bounds away from 0,
1619 but reject if X exactly equals the adjusted bounds. */
1620 REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
1621 if (!no_upper_bound)
1622 REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
1623 /* If X is a NaN, use zero instead and show we have an overflow.
1624 Otherwise, range check. */
1625 if (REAL_VALUE_ISNAN (x))
1626 overflow = 1, x = dconst0;
1627 else if (! (REAL_VALUES_LESS (l, x)
1628 && !no_upper_bound
1629 && REAL_VALUES_LESS (x, u)))
1630 overflow = 1;
1631
1632 {
1633 HOST_WIDE_INT low, high;
1634 REAL_VALUE_TO_INT (&low, &high, x);
1635 t = build_int_2 (low, high);
1636 }
1637 TREE_TYPE (t) = type;
1638 TREE_OVERFLOW (t)
1639 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1640 TREE_CONSTANT_OVERFLOW (t)
1641 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1642 }
1643 TREE_TYPE (t) = type;
1644 }
1645 else if (TREE_CODE (type) == REAL_TYPE)
1646 {
1647 if (TREE_CODE (arg1) == INTEGER_CST)
1648 return build_real_from_int_cst (type, arg1);
1649 if (TREE_CODE (arg1) == REAL_CST)
1650 {
1651 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1652 {
1653 /* We make a copy of ARG1 so that we don't modify an
1654 existing constant tree. */
1655 t = copy_node (arg1);
1656 TREE_TYPE (t) = type;
1657 return t;
1658 }
1659
1660 t = build_real (type,
1661 real_value_truncate (TYPE_MODE (type),
1662 TREE_REAL_CST (arg1)));
1663
1664 TREE_OVERFLOW (t)
1665 = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
1666 TREE_CONSTANT_OVERFLOW (t)
1667 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1668 return t;
1669 }
1670 }
1671 TREE_CONSTANT (t) = 1;
1672 return t;
1673 }
1674 \f
1675 /* Return an expr equal to X but certainly not valid as an lvalue. */
1676
1677 tree
1678 non_lvalue (tree x)
1679 {
1680 tree result;
1681
1682 /* These things are certainly not lvalues. */
1683 if (TREE_CODE (x) == NON_LVALUE_EXPR
1684 || TREE_CODE (x) == INTEGER_CST
1685 || TREE_CODE (x) == REAL_CST
1686 || TREE_CODE (x) == STRING_CST
1687 || TREE_CODE (x) == ADDR_EXPR)
1688 return x;
1689
1690 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
1691 TREE_CONSTANT (result) = TREE_CONSTANT (x);
1692 return result;
1693 }
1694
1695 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
1696 Zero means allow extended lvalues. */
1697
1698 int pedantic_lvalues;
1699
1700 /* When pedantic, return an expr equal to X but certainly not valid as a
1701 pedantic lvalue. Otherwise, return X. */
1702
1703 tree
1704 pedantic_non_lvalue (tree x)
1705 {
1706 if (pedantic_lvalues)
1707 return non_lvalue (x);
1708 else
1709 return x;
1710 }
1711 \f
1712 /* Given a tree comparison code, return the code that is the logical inverse
1713 of the given code. It is not safe to do this for floating-point
1714 comparisons, except for NE_EXPR and EQ_EXPR. */
1715
1716 static enum tree_code
1717 invert_tree_comparison (enum tree_code code)
1718 {
1719 switch (code)
1720 {
1721 case EQ_EXPR:
1722 return NE_EXPR;
1723 case NE_EXPR:
1724 return EQ_EXPR;
1725 case GT_EXPR:
1726 return LE_EXPR;
1727 case GE_EXPR:
1728 return LT_EXPR;
1729 case LT_EXPR:
1730 return GE_EXPR;
1731 case LE_EXPR:
1732 return GT_EXPR;
1733 default:
1734 abort ();
1735 }
1736 }
1737
1738 /* Similar, but return the comparison that results if the operands are
1739 swapped. This is safe for floating-point. */
1740
1741 static enum tree_code
1742 swap_tree_comparison (enum tree_code code)
1743 {
1744 switch (code)
1745 {
1746 case EQ_EXPR:
1747 case NE_EXPR:
1748 return code;
1749 case GT_EXPR:
1750 return LT_EXPR;
1751 case GE_EXPR:
1752 return LE_EXPR;
1753 case LT_EXPR:
1754 return GT_EXPR;
1755 case LE_EXPR:
1756 return GE_EXPR;
1757 default:
1758 abort ();
1759 }
1760 }
1761
1762
1763 /* Convert a comparison tree code from an enum tree_code representation
1764 into a compcode bit-based encoding. This function is the inverse of
1765 compcode_to_comparison. */
1766
1767 static int
1768 comparison_to_compcode (enum tree_code code)
1769 {
1770 switch (code)
1771 {
1772 case LT_EXPR:
1773 return COMPCODE_LT;
1774 case EQ_EXPR:
1775 return COMPCODE_EQ;
1776 case LE_EXPR:
1777 return COMPCODE_LE;
1778 case GT_EXPR:
1779 return COMPCODE_GT;
1780 case NE_EXPR:
1781 return COMPCODE_NE;
1782 case GE_EXPR:
1783 return COMPCODE_GE;
1784 default:
1785 abort ();
1786 }
1787 }
1788
1789 /* Convert a compcode bit-based encoding of a comparison operator back
1790 to GCC's enum tree_code representation. This function is the
1791 inverse of comparison_to_compcode. */
1792
1793 static enum tree_code
1794 compcode_to_comparison (int code)
1795 {
1796 switch (code)
1797 {
1798 case COMPCODE_LT:
1799 return LT_EXPR;
1800 case COMPCODE_EQ:
1801 return EQ_EXPR;
1802 case COMPCODE_LE:
1803 return LE_EXPR;
1804 case COMPCODE_GT:
1805 return GT_EXPR;
1806 case COMPCODE_NE:
1807 return NE_EXPR;
1808 case COMPCODE_GE:
1809 return GE_EXPR;
1810 default:
1811 abort ();
1812 }
1813 }
1814
1815 /* Return nonzero if CODE is a tree code that represents a truth value. */
1816
1817 static int
1818 truth_value_p (enum tree_code code)
1819 {
1820 return (TREE_CODE_CLASS (code) == '<'
1821 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
1822 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
1823 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
1824 }
1825 \f
1826 /* Return nonzero if two operands (typically of the same tree node)
1827 are necessarily equal. If either argument has side-effects this
1828 function returns zero.
1829
1830 If ONLY_CONST is nonzero, only return nonzero for constants.
1831 This function tests whether the operands are indistinguishable;
1832 it does not test whether they are equal using C's == operation.
1833 The distinction is important for IEEE floating point, because
1834 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
1835 (2) two NaNs may be indistinguishable, but NaN!=NaN.
1836
1837 If ONLY_CONST is zero, a VAR_DECL is considered equal to itself
1838 even though it may hold multiple values during a function.
1839 This is because a GCC tree node guarantees that nothing else is
1840 executed between the evaluation of its "operands" (which may often
1841 be evaluated in arbitrary order). Hence if the operands themselves
1842 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
1843 same value in each operand/subexpression. Hence a zero value for
1844 ONLY_CONST assumes isochronic (or instantaneous) tree equivalence.
1845 If comparing arbitrary expression trees, such as from different
1846 statements, ONLY_CONST must usually be nonzero. */
1847
1848 int
1849 operand_equal_p (tree arg0, tree arg1, int only_const)
1850 {
1851 tree fndecl;
1852
1853 /* If both types don't have the same signedness, then we can't consider
1854 them equal. We must check this before the STRIP_NOPS calls
1855 because they may change the signedness of the arguments. */
1856 if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
1857 return 0;
1858
1859 STRIP_NOPS (arg0);
1860 STRIP_NOPS (arg1);
1861
1862 if (TREE_CODE (arg0) != TREE_CODE (arg1)
1863 /* This is needed for conversions and for COMPONENT_REF.
1864 Might as well play it safe and always test this. */
1865 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
1866 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
1867 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
1868 return 0;
1869
1870 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
1871 We don't care about side effects in that case because the SAVE_EXPR
1872 takes care of that for us. In all other cases, two expressions are
1873 equal if they have no side effects. If we have two identical
1874 expressions with side effects that should be treated the same due
1875 to the only side effects being identical SAVE_EXPR's, that will
1876 be detected in the recursive calls below. */
1877 if (arg0 == arg1 && ! only_const
1878 && (TREE_CODE (arg0) == SAVE_EXPR
1879 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
1880 return 1;
1881
1882 /* Next handle constant cases, those for which we can return 1 even
1883 if ONLY_CONST is set. */
1884 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
1885 switch (TREE_CODE (arg0))
1886 {
1887 case INTEGER_CST:
1888 return (! TREE_CONSTANT_OVERFLOW (arg0)
1889 && ! TREE_CONSTANT_OVERFLOW (arg1)
1890 && tree_int_cst_equal (arg0, arg1));
1891
1892 case REAL_CST:
1893 return (! TREE_CONSTANT_OVERFLOW (arg0)
1894 && ! TREE_CONSTANT_OVERFLOW (arg1)
1895 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
1896 TREE_REAL_CST (arg1)));
1897
1898 case VECTOR_CST:
1899 {
1900 tree v1, v2;
1901
1902 if (TREE_CONSTANT_OVERFLOW (arg0)
1903 || TREE_CONSTANT_OVERFLOW (arg1))
1904 return 0;
1905
1906 v1 = TREE_VECTOR_CST_ELTS (arg0);
1907 v2 = TREE_VECTOR_CST_ELTS (arg1);
1908 while (v1 && v2)
1909 {
1910 if (!operand_equal_p (v1, v2, only_const))
1911 return 0;
1912 v1 = TREE_CHAIN (v1);
1913 v2 = TREE_CHAIN (v2);
1914 }
1915
1916 return 1;
1917 }
1918
1919 case COMPLEX_CST:
1920 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
1921 only_const)
1922 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
1923 only_const));
1924
1925 case STRING_CST:
1926 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
1927 && ! memcmp (TREE_STRING_POINTER (arg0),
1928 TREE_STRING_POINTER (arg1),
1929 TREE_STRING_LENGTH (arg0)));
1930
1931 case ADDR_EXPR:
1932 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
1933 0);
1934 default:
1935 break;
1936 }
1937
1938 if (only_const)
1939 return 0;
1940
1941 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
1942 {
1943 case '1':
1944 /* Two conversions are equal only if signedness and modes match. */
1945 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
1946 && (TREE_UNSIGNED (TREE_TYPE (arg0))
1947 != TREE_UNSIGNED (TREE_TYPE (arg1))))
1948 return 0;
1949
1950 return operand_equal_p (TREE_OPERAND (arg0, 0),
1951 TREE_OPERAND (arg1, 0), 0);
1952
1953 case '<':
1954 case '2':
1955 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
1956 && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
1957 0))
1958 return 1;
1959
1960 /* For commutative ops, allow the other order. */
1961 return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
1962 || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
1963 || TREE_CODE (arg0) == BIT_IOR_EXPR
1964 || TREE_CODE (arg0) == BIT_XOR_EXPR
1965 || TREE_CODE (arg0) == BIT_AND_EXPR
1966 || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
1967 && operand_equal_p (TREE_OPERAND (arg0, 0),
1968 TREE_OPERAND (arg1, 1), 0)
1969 && operand_equal_p (TREE_OPERAND (arg0, 1),
1970 TREE_OPERAND (arg1, 0), 0));
1971
1972 case 'r':
1973 /* If either of the pointer (or reference) expressions we are
1974 dereferencing contain a side effect, these cannot be equal. */
1975 if (TREE_SIDE_EFFECTS (arg0)
1976 || TREE_SIDE_EFFECTS (arg1))
1977 return 0;
1978
1979 switch (TREE_CODE (arg0))
1980 {
1981 case INDIRECT_REF:
1982 return operand_equal_p (TREE_OPERAND (arg0, 0),
1983 TREE_OPERAND (arg1, 0), 0);
1984
1985 case COMPONENT_REF:
1986 case ARRAY_REF:
1987 case ARRAY_RANGE_REF:
1988 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1989 TREE_OPERAND (arg1, 0), 0)
1990 && operand_equal_p (TREE_OPERAND (arg0, 1),
1991 TREE_OPERAND (arg1, 1), 0));
1992
1993 case BIT_FIELD_REF:
1994 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1995 TREE_OPERAND (arg1, 0), 0)
1996 && operand_equal_p (TREE_OPERAND (arg0, 1),
1997 TREE_OPERAND (arg1, 1), 0)
1998 && operand_equal_p (TREE_OPERAND (arg0, 2),
1999 TREE_OPERAND (arg1, 2), 0));
2000 default:
2001 return 0;
2002 }
2003
2004 case 'e':
2005 switch (TREE_CODE (arg0))
2006 {
2007 case ADDR_EXPR:
2008 case TRUTH_NOT_EXPR:
2009 return operand_equal_p (TREE_OPERAND (arg0, 0),
2010 TREE_OPERAND (arg1, 0), 0);
2011
2012 case RTL_EXPR:
2013 return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
2014
2015 case CALL_EXPR:
2016 /* If the CALL_EXPRs call different functions, then they
2017 clearly can not be equal. */
2018 if (! operand_equal_p (TREE_OPERAND (arg0, 0),
2019 TREE_OPERAND (arg1, 0), 0))
2020 return 0;
2021
2022 /* Only consider const functions equivalent. */
2023 fndecl = get_callee_fndecl (arg0);
2024 if (fndecl == NULL_TREE
2025 || ! (flags_from_decl_or_type (fndecl) & ECF_CONST))
2026 return 0;
2027
2028 /* Now see if all the arguments are the same. operand_equal_p
2029 does not handle TREE_LIST, so we walk the operands here
2030 feeding them to operand_equal_p. */
2031 arg0 = TREE_OPERAND (arg0, 1);
2032 arg1 = TREE_OPERAND (arg1, 1);
2033 while (arg0 && arg1)
2034 {
2035 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1), 0))
2036 return 0;
2037
2038 arg0 = TREE_CHAIN (arg0);
2039 arg1 = TREE_CHAIN (arg1);
2040 }
2041
2042 /* If we get here and both argument lists are exhausted
2043 then the CALL_EXPRs are equal. */
2044 return ! (arg0 || arg1);
2045
2046 default:
2047 return 0;
2048 }
2049
2050 case 'd':
2051 /* Consider __builtin_sqrt equal to sqrt. */
2052 return TREE_CODE (arg0) == FUNCTION_DECL
2053 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2054 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2055 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1);
2056
2057 default:
2058 return 0;
2059 }
2060 }
2061 \f
2062 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2063 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2064
2065 When in doubt, return 0. */
2066
2067 static int
2068 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2069 {
2070 int unsignedp1, unsignedpo;
2071 tree primarg0, primarg1, primother;
2072 unsigned int correct_width;
2073
2074 if (operand_equal_p (arg0, arg1, 0))
2075 return 1;
2076
2077 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2078 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2079 return 0;
2080
2081 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2082 and see if the inner values are the same. This removes any
2083 signedness comparison, which doesn't matter here. */
2084 primarg0 = arg0, primarg1 = arg1;
2085 STRIP_NOPS (primarg0);
2086 STRIP_NOPS (primarg1);
2087 if (operand_equal_p (primarg0, primarg1, 0))
2088 return 1;
2089
2090 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2091 actual comparison operand, ARG0.
2092
2093 First throw away any conversions to wider types
2094 already present in the operands. */
2095
2096 primarg1 = get_narrower (arg1, &unsignedp1);
2097 primother = get_narrower (other, &unsignedpo);
2098
2099 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2100 if (unsignedp1 == unsignedpo
2101 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2102 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2103 {
2104 tree type = TREE_TYPE (arg0);
2105
2106 /* Make sure shorter operand is extended the right way
2107 to match the longer operand. */
2108 primarg1 = convert ((*lang_hooks.types.signed_or_unsigned_type)
2109 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2110
2111 if (operand_equal_p (arg0, convert (type, primarg1), 0))
2112 return 1;
2113 }
2114
2115 return 0;
2116 }
2117 \f
2118 /* See if ARG is an expression that is either a comparison or is performing
2119 arithmetic on comparisons. The comparisons must only be comparing
2120 two different values, which will be stored in *CVAL1 and *CVAL2; if
2121 they are nonzero it means that some operands have already been found.
2122 No variables may be used anywhere else in the expression except in the
2123 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2124 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2125
2126 If this is true, return 1. Otherwise, return zero. */
2127
2128 static int
2129 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2130 {
2131 enum tree_code code = TREE_CODE (arg);
2132 char class = TREE_CODE_CLASS (code);
2133
2134 /* We can handle some of the 'e' cases here. */
2135 if (class == 'e' && code == TRUTH_NOT_EXPR)
2136 class = '1';
2137 else if (class == 'e'
2138 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2139 || code == COMPOUND_EXPR))
2140 class = '2';
2141
2142 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
2143 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2144 {
2145 /* If we've already found a CVAL1 or CVAL2, this expression is
2146 two complex to handle. */
2147 if (*cval1 || *cval2)
2148 return 0;
2149
2150 class = '1';
2151 *save_p = 1;
2152 }
2153
2154 switch (class)
2155 {
2156 case '1':
2157 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2158
2159 case '2':
2160 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2161 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2162 cval1, cval2, save_p));
2163
2164 case 'c':
2165 return 1;
2166
2167 case 'e':
2168 if (code == COND_EXPR)
2169 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2170 cval1, cval2, save_p)
2171 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2172 cval1, cval2, save_p)
2173 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2174 cval1, cval2, save_p));
2175 return 0;
2176
2177 case '<':
2178 /* First see if we can handle the first operand, then the second. For
2179 the second operand, we know *CVAL1 can't be zero. It must be that
2180 one side of the comparison is each of the values; test for the
2181 case where this isn't true by failing if the two operands
2182 are the same. */
2183
2184 if (operand_equal_p (TREE_OPERAND (arg, 0),
2185 TREE_OPERAND (arg, 1), 0))
2186 return 0;
2187
2188 if (*cval1 == 0)
2189 *cval1 = TREE_OPERAND (arg, 0);
2190 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2191 ;
2192 else if (*cval2 == 0)
2193 *cval2 = TREE_OPERAND (arg, 0);
2194 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2195 ;
2196 else
2197 return 0;
2198
2199 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2200 ;
2201 else if (*cval2 == 0)
2202 *cval2 = TREE_OPERAND (arg, 1);
2203 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2204 ;
2205 else
2206 return 0;
2207
2208 return 1;
2209
2210 default:
2211 return 0;
2212 }
2213 }
2214 \f
2215 /* ARG is a tree that is known to contain just arithmetic operations and
2216 comparisons. Evaluate the operations in the tree substituting NEW0 for
2217 any occurrence of OLD0 as an operand of a comparison and likewise for
2218 NEW1 and OLD1. */
2219
2220 static tree
2221 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2222 {
2223 tree type = TREE_TYPE (arg);
2224 enum tree_code code = TREE_CODE (arg);
2225 char class = TREE_CODE_CLASS (code);
2226
2227 /* We can handle some of the 'e' cases here. */
2228 if (class == 'e' && code == TRUTH_NOT_EXPR)
2229 class = '1';
2230 else if (class == 'e'
2231 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2232 class = '2';
2233
2234 switch (class)
2235 {
2236 case '1':
2237 return fold (build1 (code, type,
2238 eval_subst (TREE_OPERAND (arg, 0),
2239 old0, new0, old1, new1)));
2240
2241 case '2':
2242 return fold (build (code, type,
2243 eval_subst (TREE_OPERAND (arg, 0),
2244 old0, new0, old1, new1),
2245 eval_subst (TREE_OPERAND (arg, 1),
2246 old0, new0, old1, new1)));
2247
2248 case 'e':
2249 switch (code)
2250 {
2251 case SAVE_EXPR:
2252 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2253
2254 case COMPOUND_EXPR:
2255 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2256
2257 case COND_EXPR:
2258 return fold (build (code, type,
2259 eval_subst (TREE_OPERAND (arg, 0),
2260 old0, new0, old1, new1),
2261 eval_subst (TREE_OPERAND (arg, 1),
2262 old0, new0, old1, new1),
2263 eval_subst (TREE_OPERAND (arg, 2),
2264 old0, new0, old1, new1)));
2265 default:
2266 break;
2267 }
2268 /* Fall through - ??? */
2269
2270 case '<':
2271 {
2272 tree arg0 = TREE_OPERAND (arg, 0);
2273 tree arg1 = TREE_OPERAND (arg, 1);
2274
2275 /* We need to check both for exact equality and tree equality. The
2276 former will be true if the operand has a side-effect. In that
2277 case, we know the operand occurred exactly once. */
2278
2279 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2280 arg0 = new0;
2281 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2282 arg0 = new1;
2283
2284 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2285 arg1 = new0;
2286 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2287 arg1 = new1;
2288
2289 return fold (build (code, type, arg0, arg1));
2290 }
2291
2292 default:
2293 return arg;
2294 }
2295 }
2296 \f
2297 /* Return a tree for the case when the result of an expression is RESULT
2298 converted to TYPE and OMITTED was previously an operand of the expression
2299 but is now not needed (e.g., we folded OMITTED * 0).
2300
2301 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2302 the conversion of RESULT to TYPE. */
2303
2304 tree
2305 omit_one_operand (tree type, tree result, tree omitted)
2306 {
2307 tree t = convert (type, result);
2308
2309 if (TREE_SIDE_EFFECTS (omitted))
2310 return build (COMPOUND_EXPR, type, omitted, t);
2311
2312 return non_lvalue (t);
2313 }
2314
2315 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2316
2317 static tree
2318 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2319 {
2320 tree t = convert (type, result);
2321
2322 if (TREE_SIDE_EFFECTS (omitted))
2323 return build (COMPOUND_EXPR, type, omitted, t);
2324
2325 return pedantic_non_lvalue (t);
2326 }
2327 \f
2328 /* Return a simplified tree node for the truth-negation of ARG. This
2329 never alters ARG itself. We assume that ARG is an operation that
2330 returns a truth value (0 or 1). */
2331
2332 tree
2333 invert_truthvalue (tree arg)
2334 {
2335 tree type = TREE_TYPE (arg);
2336 enum tree_code code = TREE_CODE (arg);
2337
2338 if (code == ERROR_MARK)
2339 return arg;
2340
2341 /* If this is a comparison, we can simply invert it, except for
2342 floating-point non-equality comparisons, in which case we just
2343 enclose a TRUTH_NOT_EXPR around what we have. */
2344
2345 if (TREE_CODE_CLASS (code) == '<')
2346 {
2347 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2348 && !flag_unsafe_math_optimizations
2349 && code != NE_EXPR
2350 && code != EQ_EXPR)
2351 return build1 (TRUTH_NOT_EXPR, type, arg);
2352 else
2353 return build (invert_tree_comparison (code), type,
2354 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2355 }
2356
2357 switch (code)
2358 {
2359 case INTEGER_CST:
2360 return convert (type, build_int_2 (integer_zerop (arg), 0));
2361
2362 case TRUTH_AND_EXPR:
2363 return build (TRUTH_OR_EXPR, type,
2364 invert_truthvalue (TREE_OPERAND (arg, 0)),
2365 invert_truthvalue (TREE_OPERAND (arg, 1)));
2366
2367 case TRUTH_OR_EXPR:
2368 return build (TRUTH_AND_EXPR, type,
2369 invert_truthvalue (TREE_OPERAND (arg, 0)),
2370 invert_truthvalue (TREE_OPERAND (arg, 1)));
2371
2372 case TRUTH_XOR_EXPR:
2373 /* Here we can invert either operand. We invert the first operand
2374 unless the second operand is a TRUTH_NOT_EXPR in which case our
2375 result is the XOR of the first operand with the inside of the
2376 negation of the second operand. */
2377
2378 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2379 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2380 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2381 else
2382 return build (TRUTH_XOR_EXPR, type,
2383 invert_truthvalue (TREE_OPERAND (arg, 0)),
2384 TREE_OPERAND (arg, 1));
2385
2386 case TRUTH_ANDIF_EXPR:
2387 return build (TRUTH_ORIF_EXPR, type,
2388 invert_truthvalue (TREE_OPERAND (arg, 0)),
2389 invert_truthvalue (TREE_OPERAND (arg, 1)));
2390
2391 case TRUTH_ORIF_EXPR:
2392 return build (TRUTH_ANDIF_EXPR, type,
2393 invert_truthvalue (TREE_OPERAND (arg, 0)),
2394 invert_truthvalue (TREE_OPERAND (arg, 1)));
2395
2396 case TRUTH_NOT_EXPR:
2397 return TREE_OPERAND (arg, 0);
2398
2399 case COND_EXPR:
2400 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2401 invert_truthvalue (TREE_OPERAND (arg, 1)),
2402 invert_truthvalue (TREE_OPERAND (arg, 2)));
2403
2404 case COMPOUND_EXPR:
2405 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2406 invert_truthvalue (TREE_OPERAND (arg, 1)));
2407
2408 case WITH_RECORD_EXPR:
2409 return build (WITH_RECORD_EXPR, type,
2410 invert_truthvalue (TREE_OPERAND (arg, 0)),
2411 TREE_OPERAND (arg, 1));
2412
2413 case NON_LVALUE_EXPR:
2414 return invert_truthvalue (TREE_OPERAND (arg, 0));
2415
2416 case NOP_EXPR:
2417 case CONVERT_EXPR:
2418 case FLOAT_EXPR:
2419 return build1 (TREE_CODE (arg), type,
2420 invert_truthvalue (TREE_OPERAND (arg, 0)));
2421
2422 case BIT_AND_EXPR:
2423 if (!integer_onep (TREE_OPERAND (arg, 1)))
2424 break;
2425 return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));
2426
2427 case SAVE_EXPR:
2428 return build1 (TRUTH_NOT_EXPR, type, arg);
2429
2430 case CLEANUP_POINT_EXPR:
2431 return build1 (CLEANUP_POINT_EXPR, type,
2432 invert_truthvalue (TREE_OPERAND (arg, 0)));
2433
2434 default:
2435 break;
2436 }
2437 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2438 abort ();
2439 return build1 (TRUTH_NOT_EXPR, type, arg);
2440 }
2441
2442 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2443 operands are another bit-wise operation with a common input. If so,
2444 distribute the bit operations to save an operation and possibly two if
2445 constants are involved. For example, convert
2446 (A | B) & (A | C) into A | (B & C)
2447 Further simplification will occur if B and C are constants.
2448
2449 If this optimization cannot be done, 0 will be returned. */
2450
2451 static tree
2452 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
2453 {
2454 tree common;
2455 tree left, right;
2456
2457 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2458 || TREE_CODE (arg0) == code
2459 || (TREE_CODE (arg0) != BIT_AND_EXPR
2460 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2461 return 0;
2462
2463 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2464 {
2465 common = TREE_OPERAND (arg0, 0);
2466 left = TREE_OPERAND (arg0, 1);
2467 right = TREE_OPERAND (arg1, 1);
2468 }
2469 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2470 {
2471 common = TREE_OPERAND (arg0, 0);
2472 left = TREE_OPERAND (arg0, 1);
2473 right = TREE_OPERAND (arg1, 0);
2474 }
2475 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2476 {
2477 common = TREE_OPERAND (arg0, 1);
2478 left = TREE_OPERAND (arg0, 0);
2479 right = TREE_OPERAND (arg1, 1);
2480 }
2481 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2482 {
2483 common = TREE_OPERAND (arg0, 1);
2484 left = TREE_OPERAND (arg0, 0);
2485 right = TREE_OPERAND (arg1, 0);
2486 }
2487 else
2488 return 0;
2489
2490 return fold (build (TREE_CODE (arg0), type, common,
2491 fold (build (code, type, left, right))));
2492 }
2493 \f
2494 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2495 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
2496
2497 static tree
2498 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
2499 int unsignedp)
2500 {
2501 tree result = build (BIT_FIELD_REF, type, inner,
2502 size_int (bitsize), bitsize_int (bitpos));
2503
2504 TREE_UNSIGNED (result) = unsignedp;
2505
2506 return result;
2507 }
2508
2509 /* Optimize a bit-field compare.
2510
2511 There are two cases: First is a compare against a constant and the
2512 second is a comparison of two items where the fields are at the same
2513 bit position relative to the start of a chunk (byte, halfword, word)
2514 large enough to contain it. In these cases we can avoid the shift
2515 implicit in bitfield extractions.
2516
2517 For constants, we emit a compare of the shifted constant with the
2518 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2519 compared. For two fields at the same position, we do the ANDs with the
2520 similar mask and compare the result of the ANDs.
2521
2522 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2523 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2524 are the left and right operands of the comparison, respectively.
2525
2526 If the optimization described above can be done, we return the resulting
2527 tree. Otherwise we return zero. */
2528
2529 static tree
2530 optimize_bit_field_compare (enum tree_code code, tree compare_type,
2531 tree lhs, tree rhs)
2532 {
2533 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
2534 tree type = TREE_TYPE (lhs);
2535 tree signed_type, unsigned_type;
2536 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2537 enum machine_mode lmode, rmode, nmode;
2538 int lunsignedp, runsignedp;
2539 int lvolatilep = 0, rvolatilep = 0;
2540 tree linner, rinner = NULL_TREE;
2541 tree mask;
2542 tree offset;
2543
2544 /* Get all the information about the extractions being done. If the bit size
2545 if the same as the size of the underlying object, we aren't doing an
2546 extraction at all and so can do nothing. We also don't want to
2547 do anything if the inner expression is a PLACEHOLDER_EXPR since we
2548 then will no longer be able to replace it. */
2549 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2550 &lunsignedp, &lvolatilep);
2551 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2552 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
2553 return 0;
2554
2555 if (!const_p)
2556 {
2557 /* If this is not a constant, we can only do something if bit positions,
2558 sizes, and signedness are the same. */
2559 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
2560 &runsignedp, &rvolatilep);
2561
2562 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2563 || lunsignedp != runsignedp || offset != 0
2564 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
2565 return 0;
2566 }
2567
2568 /* See if we can find a mode to refer to this field. We should be able to,
2569 but fail if we can't. */
2570 nmode = get_best_mode (lbitsize, lbitpos,
2571 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
2572 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
2573 TYPE_ALIGN (TREE_TYPE (rinner))),
2574 word_mode, lvolatilep || rvolatilep);
2575 if (nmode == VOIDmode)
2576 return 0;
2577
2578 /* Set signed and unsigned types of the precision of this mode for the
2579 shifts below. */
2580 signed_type = (*lang_hooks.types.type_for_mode) (nmode, 0);
2581 unsigned_type = (*lang_hooks.types.type_for_mode) (nmode, 1);
2582
2583 /* Compute the bit position and size for the new reference and our offset
2584 within it. If the new reference is the same size as the original, we
2585 won't optimize anything, so return zero. */
2586 nbitsize = GET_MODE_BITSIZE (nmode);
2587 nbitpos = lbitpos & ~ (nbitsize - 1);
2588 lbitpos -= nbitpos;
2589 if (nbitsize == lbitsize)
2590 return 0;
2591
2592 if (BYTES_BIG_ENDIAN)
2593 lbitpos = nbitsize - lbitsize - lbitpos;
2594
2595 /* Make the mask to be used against the extracted field. */
2596 mask = build_int_2 (~0, ~0);
2597 TREE_TYPE (mask) = unsigned_type;
2598 force_fit_type (mask, 0);
2599 mask = convert (unsigned_type, mask);
2600 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
2601 mask = const_binop (RSHIFT_EXPR, mask,
2602 size_int (nbitsize - lbitsize - lbitpos), 0);
2603
2604 if (! const_p)
2605 /* If not comparing with constant, just rework the comparison
2606 and return. */
2607 return build (code, compare_type,
2608 build (BIT_AND_EXPR, unsigned_type,
2609 make_bit_field_ref (linner, unsigned_type,
2610 nbitsize, nbitpos, 1),
2611 mask),
2612 build (BIT_AND_EXPR, unsigned_type,
2613 make_bit_field_ref (rinner, unsigned_type,
2614 nbitsize, nbitpos, 1),
2615 mask));
2616
2617 /* Otherwise, we are handling the constant case. See if the constant is too
2618 big for the field. Warn and return a tree of for 0 (false) if so. We do
2619 this not only for its own sake, but to avoid having to test for this
2620 error case below. If we didn't, we might generate wrong code.
2621
2622 For unsigned fields, the constant shifted right by the field length should
2623 be all zero. For signed fields, the high-order bits should agree with
2624 the sign bit. */
2625
2626 if (lunsignedp)
2627 {
2628 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2629 convert (unsigned_type, rhs),
2630 size_int (lbitsize), 0)))
2631 {
2632 warning ("comparison is always %d due to width of bit-field",
2633 code == NE_EXPR);
2634 return convert (compare_type,
2635 (code == NE_EXPR
2636 ? integer_one_node : integer_zero_node));
2637 }
2638 }
2639 else
2640 {
2641 tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
2642 size_int (lbitsize - 1), 0);
2643 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2644 {
2645 warning ("comparison is always %d due to width of bit-field",
2646 code == NE_EXPR);
2647 return convert (compare_type,
2648 (code == NE_EXPR
2649 ? integer_one_node : integer_zero_node));
2650 }
2651 }
2652
2653 /* Single-bit compares should always be against zero. */
2654 if (lbitsize == 1 && ! integer_zerop (rhs))
2655 {
2656 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2657 rhs = convert (type, integer_zero_node);
2658 }
2659
2660 /* Make a new bitfield reference, shift the constant over the
2661 appropriate number of bits and mask it with the computed mask
2662 (in case this was a signed field). If we changed it, make a new one. */
2663 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
2664 if (lvolatilep)
2665 {
2666 TREE_SIDE_EFFECTS (lhs) = 1;
2667 TREE_THIS_VOLATILE (lhs) = 1;
2668 }
2669
2670 rhs = fold (const_binop (BIT_AND_EXPR,
2671 const_binop (LSHIFT_EXPR,
2672 convert (unsigned_type, rhs),
2673 size_int (lbitpos), 0),
2674 mask, 0));
2675
2676 return build (code, compare_type,
2677 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
2678 rhs);
2679 }
2680 \f
2681 /* Subroutine for fold_truthop: decode a field reference.
2682
2683 If EXP is a comparison reference, we return the innermost reference.
2684
2685 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2686 set to the starting bit number.
2687
2688 If the innermost field can be completely contained in a mode-sized
2689 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
2690
2691 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
2692 otherwise it is not changed.
2693
2694 *PUNSIGNEDP is set to the signedness of the field.
2695
2696 *PMASK is set to the mask used. This is either contained in a
2697 BIT_AND_EXPR or derived from the width of the field.
2698
2699 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
2700
2701 Return 0 if this is not a component reference or is one that we can't
2702 do anything with. */
2703
2704 static tree
2705 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
2706 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
2707 int *punsignedp, int *pvolatilep,
2708 tree *pmask, tree *pand_mask)
2709 {
2710 tree outer_type = 0;
2711 tree and_mask = 0;
2712 tree mask, inner, offset;
2713 tree unsigned_type;
2714 unsigned int precision;
2715
2716 /* All the optimizations using this function assume integer fields.
2717 There are problems with FP fields since the type_for_size call
2718 below can fail for, e.g., XFmode. */
2719 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
2720 return 0;
2721
2722 /* We are interested in the bare arrangement of bits, so strip everything
2723 that doesn't affect the machine mode. However, record the type of the
2724 outermost expression if it may matter below. */
2725 if (TREE_CODE (exp) == NOP_EXPR
2726 || TREE_CODE (exp) == CONVERT_EXPR
2727 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2728 outer_type = TREE_TYPE (exp);
2729 STRIP_NOPS (exp);
2730
2731 if (TREE_CODE (exp) == BIT_AND_EXPR)
2732 {
2733 and_mask = TREE_OPERAND (exp, 1);
2734 exp = TREE_OPERAND (exp, 0);
2735 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
2736 if (TREE_CODE (and_mask) != INTEGER_CST)
2737 return 0;
2738 }
2739
2740 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
2741 punsignedp, pvolatilep);
2742 if ((inner == exp && and_mask == 0)
2743 || *pbitsize < 0 || offset != 0
2744 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
2745 return 0;
2746
2747 /* If the number of bits in the reference is the same as the bitsize of
2748 the outer type, then the outer type gives the signedness. Otherwise
2749 (in case of a small bitfield) the signedness is unchanged. */
2750 if (outer_type && *pbitsize == tree_low_cst (TYPE_SIZE (outer_type), 1))
2751 *punsignedp = TREE_UNSIGNED (outer_type);
2752
2753 /* Compute the mask to access the bitfield. */
2754 unsigned_type = (*lang_hooks.types.type_for_size) (*pbitsize, 1);
2755 precision = TYPE_PRECISION (unsigned_type);
2756
2757 mask = build_int_2 (~0, ~0);
2758 TREE_TYPE (mask) = unsigned_type;
2759 force_fit_type (mask, 0);
2760 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2761 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2762
2763 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
2764 if (and_mask != 0)
2765 mask = fold (build (BIT_AND_EXPR, unsigned_type,
2766 convert (unsigned_type, and_mask), mask));
2767
2768 *pmask = mask;
2769 *pand_mask = and_mask;
2770 return inner;
2771 }
2772
2773 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
2774 bit positions. */
2775
2776 static int
2777 all_ones_mask_p (tree mask, int size)
2778 {
2779 tree type = TREE_TYPE (mask);
2780 unsigned int precision = TYPE_PRECISION (type);
2781 tree tmask;
2782
2783 tmask = build_int_2 (~0, ~0);
2784 TREE_TYPE (tmask) = (*lang_hooks.types.signed_type) (type);
2785 force_fit_type (tmask, 0);
2786 return
2787 tree_int_cst_equal (mask,
2788 const_binop (RSHIFT_EXPR,
2789 const_binop (LSHIFT_EXPR, tmask,
2790 size_int (precision - size),
2791 0),
2792 size_int (precision - size), 0));
2793 }
2794
2795 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
2796 represents the sign bit of EXP's type. If EXP represents a sign
2797 or zero extension, also test VAL against the unextended type.
2798 The return value is the (sub)expression whose sign bit is VAL,
2799 or NULL_TREE otherwise. */
2800
2801 static tree
2802 sign_bit_p (tree exp, tree val)
2803 {
2804 unsigned HOST_WIDE_INT mask_lo, lo;
2805 HOST_WIDE_INT mask_hi, hi;
2806 int width;
2807 tree t;
2808
2809 /* Tree EXP must have an integral type. */
2810 t = TREE_TYPE (exp);
2811 if (! INTEGRAL_TYPE_P (t))
2812 return NULL_TREE;
2813
2814 /* Tree VAL must be an integer constant. */
2815 if (TREE_CODE (val) != INTEGER_CST
2816 || TREE_CONSTANT_OVERFLOW (val))
2817 return NULL_TREE;
2818
2819 width = TYPE_PRECISION (t);
2820 if (width > HOST_BITS_PER_WIDE_INT)
2821 {
2822 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
2823 lo = 0;
2824
2825 mask_hi = ((unsigned HOST_WIDE_INT) -1
2826 >> (2 * HOST_BITS_PER_WIDE_INT - width));
2827 mask_lo = -1;
2828 }
2829 else
2830 {
2831 hi = 0;
2832 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
2833
2834 mask_hi = 0;
2835 mask_lo = ((unsigned HOST_WIDE_INT) -1
2836 >> (HOST_BITS_PER_WIDE_INT - width));
2837 }
2838
2839 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
2840 treat VAL as if it were unsigned. */
2841 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
2842 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
2843 return exp;
2844
2845 /* Handle extension from a narrower type. */
2846 if (TREE_CODE (exp) == NOP_EXPR
2847 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
2848 return sign_bit_p (TREE_OPERAND (exp, 0), val);
2849
2850 return NULL_TREE;
2851 }
2852
2853 /* Subroutine for fold_truthop: determine if an operand is simple enough
2854 to be evaluated unconditionally. */
2855
2856 static int
2857 simple_operand_p (tree exp)
2858 {
2859 /* Strip any conversions that don't change the machine mode. */
2860 while ((TREE_CODE (exp) == NOP_EXPR
2861 || TREE_CODE (exp) == CONVERT_EXPR)
2862 && (TYPE_MODE (TREE_TYPE (exp))
2863 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
2864 exp = TREE_OPERAND (exp, 0);
2865
2866 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
2867 || (DECL_P (exp)
2868 && ! TREE_ADDRESSABLE (exp)
2869 && ! TREE_THIS_VOLATILE (exp)
2870 && ! DECL_NONLOCAL (exp)
2871 /* Don't regard global variables as simple. They may be
2872 allocated in ways unknown to the compiler (shared memory,
2873 #pragma weak, etc). */
2874 && ! TREE_PUBLIC (exp)
2875 && ! DECL_EXTERNAL (exp)
2876 /* Loading a static variable is unduly expensive, but global
2877 registers aren't expensive. */
2878 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
2879 }
2880 \f
2881 /* The following functions are subroutines to fold_range_test and allow it to
2882 try to change a logical combination of comparisons into a range test.
2883
2884 For example, both
2885 X == 2 || X == 3 || X == 4 || X == 5
2886 and
2887 X >= 2 && X <= 5
2888 are converted to
2889 (unsigned) (X - 2) <= 3
2890
2891 We describe each set of comparisons as being either inside or outside
2892 a range, using a variable named like IN_P, and then describe the
2893 range with a lower and upper bound. If one of the bounds is omitted,
2894 it represents either the highest or lowest value of the type.
2895
2896 In the comments below, we represent a range by two numbers in brackets
2897 preceded by a "+" to designate being inside that range, or a "-" to
2898 designate being outside that range, so the condition can be inverted by
2899 flipping the prefix. An omitted bound is represented by a "-". For
2900 example, "- [-, 10]" means being outside the range starting at the lowest
2901 possible value and ending at 10, in other words, being greater than 10.
2902 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
2903 always false.
2904
2905 We set up things so that the missing bounds are handled in a consistent
2906 manner so neither a missing bound nor "true" and "false" need to be
2907 handled using a special case. */
2908
2909 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
2910 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
2911 and UPPER1_P are nonzero if the respective argument is an upper bound
2912 and zero for a lower. TYPE, if nonzero, is the type of the result; it
2913 must be specified for a comparison. ARG1 will be converted to ARG0's
2914 type if both are specified. */
2915
2916 static tree
2917 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
2918 tree arg1, int upper1_p)
2919 {
2920 tree tem;
2921 int result;
2922 int sgn0, sgn1;
2923
2924 /* If neither arg represents infinity, do the normal operation.
2925 Else, if not a comparison, return infinity. Else handle the special
2926 comparison rules. Note that most of the cases below won't occur, but
2927 are handled for consistency. */
2928
2929 if (arg0 != 0 && arg1 != 0)
2930 {
2931 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
2932 arg0, convert (TREE_TYPE (arg0), arg1)));
2933 STRIP_NOPS (tem);
2934 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
2935 }
2936
2937 if (TREE_CODE_CLASS (code) != '<')
2938 return 0;
2939
2940 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
2941 for neither. In real maths, we cannot assume open ended ranges are
2942 the same. But, this is computer arithmetic, where numbers are finite.
2943 We can therefore make the transformation of any unbounded range with
2944 the value Z, Z being greater than any representable number. This permits
2945 us to treat unbounded ranges as equal. */
2946 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
2947 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
2948 switch (code)
2949 {
2950 case EQ_EXPR:
2951 result = sgn0 == sgn1;
2952 break;
2953 case NE_EXPR:
2954 result = sgn0 != sgn1;
2955 break;
2956 case LT_EXPR:
2957 result = sgn0 < sgn1;
2958 break;
2959 case LE_EXPR:
2960 result = sgn0 <= sgn1;
2961 break;
2962 case GT_EXPR:
2963 result = sgn0 > sgn1;
2964 break;
2965 case GE_EXPR:
2966 result = sgn0 >= sgn1;
2967 break;
2968 default:
2969 abort ();
2970 }
2971
2972 return convert (type, result ? integer_one_node : integer_zero_node);
2973 }
2974 \f
2975 /* Given EXP, a logical expression, set the range it is testing into
2976 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
2977 actually being tested. *PLOW and *PHIGH will be made of the same type
2978 as the returned expression. If EXP is not a comparison, we will most
2979 likely not be returning a useful value and range. */
2980
2981 static tree
2982 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
2983 {
2984 enum tree_code code;
2985 tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
2986 tree orig_type = NULL_TREE;
2987 int in_p, n_in_p;
2988 tree low, high, n_low, n_high;
2989
2990 /* Start with simply saying "EXP != 0" and then look at the code of EXP
2991 and see if we can refine the range. Some of the cases below may not
2992 happen, but it doesn't seem worth worrying about this. We "continue"
2993 the outer loop when we've changed something; otherwise we "break"
2994 the switch, which will "break" the while. */
2995
2996 in_p = 0, low = high = convert (TREE_TYPE (exp), integer_zero_node);
2997
2998 while (1)
2999 {
3000 code = TREE_CODE (exp);
3001
3002 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3003 {
3004 if (first_rtl_op (code) > 0)
3005 arg0 = TREE_OPERAND (exp, 0);
3006 if (TREE_CODE_CLASS (code) == '<'
3007 || TREE_CODE_CLASS (code) == '1'
3008 || TREE_CODE_CLASS (code) == '2')
3009 type = TREE_TYPE (arg0);
3010 if (TREE_CODE_CLASS (code) == '2'
3011 || TREE_CODE_CLASS (code) == '<'
3012 || (TREE_CODE_CLASS (code) == 'e'
3013 && TREE_CODE_LENGTH (code) > 1))
3014 arg1 = TREE_OPERAND (exp, 1);
3015 }
3016
3017 /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
3018 lose a cast by accident. */
3019 if (type != NULL_TREE && orig_type == NULL_TREE)
3020 orig_type = type;
3021
3022 switch (code)
3023 {
3024 case TRUTH_NOT_EXPR:
3025 in_p = ! in_p, exp = arg0;
3026 continue;
3027
3028 case EQ_EXPR: case NE_EXPR:
3029 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3030 /* We can only do something if the range is testing for zero
3031 and if the second operand is an integer constant. Note that
3032 saying something is "in" the range we make is done by
3033 complementing IN_P since it will set in the initial case of
3034 being not equal to zero; "out" is leaving it alone. */
3035 if (low == 0 || high == 0
3036 || ! integer_zerop (low) || ! integer_zerop (high)
3037 || TREE_CODE (arg1) != INTEGER_CST)
3038 break;
3039
3040 switch (code)
3041 {
3042 case NE_EXPR: /* - [c, c] */
3043 low = high = arg1;
3044 break;
3045 case EQ_EXPR: /* + [c, c] */
3046 in_p = ! in_p, low = high = arg1;
3047 break;
3048 case GT_EXPR: /* - [-, c] */
3049 low = 0, high = arg1;
3050 break;
3051 case GE_EXPR: /* + [c, -] */
3052 in_p = ! in_p, low = arg1, high = 0;
3053 break;
3054 case LT_EXPR: /* - [c, -] */
3055 low = arg1, high = 0;
3056 break;
3057 case LE_EXPR: /* + [-, c] */
3058 in_p = ! in_p, low = 0, high = arg1;
3059 break;
3060 default:
3061 abort ();
3062 }
3063
3064 exp = arg0;
3065
3066 /* If this is an unsigned comparison, we also know that EXP is
3067 greater than or equal to zero. We base the range tests we make
3068 on that fact, so we record it here so we can parse existing
3069 range tests. */
3070 if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
3071 {
3072 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
3073 1, convert (type, integer_zero_node),
3074 NULL_TREE))
3075 break;
3076
3077 in_p = n_in_p, low = n_low, high = n_high;
3078
3079 /* If the high bound is missing, but we
3080 have a low bound, reverse the range so
3081 it goes from zero to the low bound minus 1. */
3082 if (high == 0 && low)
3083 {
3084 in_p = ! in_p;
3085 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3086 integer_one_node, 0);
3087 low = convert (type, integer_zero_node);
3088 }
3089 }
3090 continue;
3091
3092 case NEGATE_EXPR:
3093 /* (-x) IN [a,b] -> x in [-b, -a] */
3094 n_low = range_binop (MINUS_EXPR, type,
3095 convert (type, integer_zero_node), 0, high, 1);
3096 n_high = range_binop (MINUS_EXPR, type,
3097 convert (type, integer_zero_node), 0, low, 0);
3098 low = n_low, high = n_high;
3099 exp = arg0;
3100 continue;
3101
3102 case BIT_NOT_EXPR:
3103 /* ~ X -> -X - 1 */
3104 exp = build (MINUS_EXPR, type, negate_expr (arg0),
3105 convert (type, integer_one_node));
3106 continue;
3107
3108 case PLUS_EXPR: case MINUS_EXPR:
3109 if (TREE_CODE (arg1) != INTEGER_CST)
3110 break;
3111
3112 /* If EXP is signed, any overflow in the computation is undefined,
3113 so we don't worry about it so long as our computations on
3114 the bounds don't overflow. For unsigned, overflow is defined
3115 and this is exactly the right thing. */
3116 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3117 type, low, 0, arg1, 0);
3118 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3119 type, high, 1, arg1, 0);
3120 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3121 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3122 break;
3123
3124 /* Check for an unsigned range which has wrapped around the maximum
3125 value thus making n_high < n_low, and normalize it. */
3126 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3127 {
3128 low = range_binop (PLUS_EXPR, type, n_high, 0,
3129 integer_one_node, 0);
3130 high = range_binop (MINUS_EXPR, type, n_low, 0,
3131 integer_one_node, 0);
3132
3133 /* If the range is of the form +/- [ x+1, x ], we won't
3134 be able to normalize it. But then, it represents the
3135 whole range or the empty set, so make it
3136 +/- [ -, - ]. */
3137 if (tree_int_cst_equal (n_low, low)
3138 && tree_int_cst_equal (n_high, high))
3139 low = high = 0;
3140 else
3141 in_p = ! in_p;
3142 }
3143 else
3144 low = n_low, high = n_high;
3145
3146 exp = arg0;
3147 continue;
3148
3149 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3150 if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
3151 break;
3152
3153 if (! INTEGRAL_TYPE_P (type)
3154 || (low != 0 && ! int_fits_type_p (low, type))
3155 || (high != 0 && ! int_fits_type_p (high, type)))
3156 break;
3157
3158 n_low = low, n_high = high;
3159
3160 if (n_low != 0)
3161 n_low = convert (type, n_low);
3162
3163 if (n_high != 0)
3164 n_high = convert (type, n_high);
3165
3166 /* If we're converting from an unsigned to a signed type,
3167 we will be doing the comparison as unsigned. The tests above
3168 have already verified that LOW and HIGH are both positive.
3169
3170 So we have to make sure that the original unsigned value will
3171 be interpreted as positive. */
3172 if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
3173 {
3174 tree equiv_type = (*lang_hooks.types.type_for_mode)
3175 (TYPE_MODE (type), 1);
3176 tree high_positive;
3177
3178 /* A range without an upper bound is, naturally, unbounded.
3179 Since convert would have cropped a very large value, use
3180 the max value for the destination type. */
3181 high_positive
3182 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3183 : TYPE_MAX_VALUE (type);
3184
3185 if (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (exp)))
3186 high_positive = fold (build (RSHIFT_EXPR, type,
3187 convert (type, high_positive),
3188 convert (type, integer_one_node)));
3189
3190 /* If the low bound is specified, "and" the range with the
3191 range for which the original unsigned value will be
3192 positive. */
3193 if (low != 0)
3194 {
3195 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3196 1, n_low, n_high,
3197 1, convert (type, integer_zero_node),
3198 high_positive))
3199 break;
3200
3201 in_p = (n_in_p == in_p);
3202 }
3203 else
3204 {
3205 /* Otherwise, "or" the range with the range of the input
3206 that will be interpreted as negative. */
3207 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3208 0, n_low, n_high,
3209 1, convert (type, integer_zero_node),
3210 high_positive))
3211 break;
3212
3213 in_p = (in_p != n_in_p);
3214 }
3215 }
3216
3217 exp = arg0;
3218 low = n_low, high = n_high;
3219 continue;
3220
3221 default:
3222 break;
3223 }
3224
3225 break;
3226 }
3227
3228 /* If EXP is a constant, we can evaluate whether this is true or false. */
3229 if (TREE_CODE (exp) == INTEGER_CST)
3230 {
3231 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3232 exp, 0, low, 0))
3233 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3234 exp, 1, high, 1)));
3235 low = high = 0;
3236 exp = 0;
3237 }
3238
3239 *pin_p = in_p, *plow = low, *phigh = high;
3240 return exp;
3241 }
3242 \f
3243 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3244 type, TYPE, return an expression to test if EXP is in (or out of, depending
3245 on IN_P) the range. */
3246
3247 static tree
3248 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3249 {
3250 tree etype = TREE_TYPE (exp);
3251 tree value;
3252
3253 if (! in_p
3254 && (0 != (value = build_range_check (type, exp, 1, low, high))))
3255 return invert_truthvalue (value);
3256
3257 if (low == 0 && high == 0)
3258 return convert (type, integer_one_node);
3259
3260 if (low == 0)
3261 return fold (build (LE_EXPR, type, exp, high));
3262
3263 if (high == 0)
3264 return fold (build (GE_EXPR, type, exp, low));
3265
3266 if (operand_equal_p (low, high, 0))
3267 return fold (build (EQ_EXPR, type, exp, low));
3268
3269 if (integer_zerop (low))
3270 {
3271 if (! TREE_UNSIGNED (etype))
3272 {
3273 etype = (*lang_hooks.types.unsigned_type) (etype);
3274 high = convert (etype, high);
3275 exp = convert (etype, exp);
3276 }
3277 return build_range_check (type, exp, 1, 0, high);
3278 }
3279
3280 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3281 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3282 {
3283 unsigned HOST_WIDE_INT lo;
3284 HOST_WIDE_INT hi;
3285 int prec;
3286
3287 prec = TYPE_PRECISION (etype);
3288 if (prec <= HOST_BITS_PER_WIDE_INT)
3289 {
3290 hi = 0;
3291 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3292 }
3293 else
3294 {
3295 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3296 lo = (unsigned HOST_WIDE_INT) -1;
3297 }
3298
3299 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3300 {
3301 if (TREE_UNSIGNED (etype))
3302 {
3303 etype = (*lang_hooks.types.signed_type) (etype);
3304 exp = convert (etype, exp);
3305 }
3306 return fold (build (GT_EXPR, type, exp,
3307 convert (etype, integer_zero_node)));
3308 }
3309 }
3310
3311 if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3312 && ! TREE_OVERFLOW (value))
3313 return build_range_check (type,
3314 fold (build (MINUS_EXPR, etype, exp, low)),
3315 1, convert (etype, integer_zero_node), value);
3316
3317 return 0;
3318 }
3319 \f
3320 /* Given two ranges, see if we can merge them into one. Return 1 if we
3321 can, 0 if we can't. Set the output range into the specified parameters. */
3322
3323 static int
3324 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3325 tree high0, int in1_p, tree low1, tree high1)
3326 {
3327 int no_overlap;
3328 int subset;
3329 int temp;
3330 tree tem;
3331 int in_p;
3332 tree low, high;
3333 int lowequal = ((low0 == 0 && low1 == 0)
3334 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3335 low0, 0, low1, 0)));
3336 int highequal = ((high0 == 0 && high1 == 0)
3337 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3338 high0, 1, high1, 1)));
3339
3340 /* Make range 0 be the range that starts first, or ends last if they
3341 start at the same value. Swap them if it isn't. */
3342 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3343 low0, 0, low1, 0))
3344 || (lowequal
3345 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3346 high1, 1, high0, 1))))
3347 {
3348 temp = in0_p, in0_p = in1_p, in1_p = temp;
3349 tem = low0, low0 = low1, low1 = tem;
3350 tem = high0, high0 = high1, high1 = tem;
3351 }
3352
3353 /* Now flag two cases, whether the ranges are disjoint or whether the
3354 second range is totally subsumed in the first. Note that the tests
3355 below are simplified by the ones above. */
3356 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3357 high0, 1, low1, 0));
3358 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3359 high1, 1, high0, 1));
3360
3361 /* We now have four cases, depending on whether we are including or
3362 excluding the two ranges. */
3363 if (in0_p && in1_p)
3364 {
3365 /* If they don't overlap, the result is false. If the second range
3366 is a subset it is the result. Otherwise, the range is from the start
3367 of the second to the end of the first. */
3368 if (no_overlap)
3369 in_p = 0, low = high = 0;
3370 else if (subset)
3371 in_p = 1, low = low1, high = high1;
3372 else
3373 in_p = 1, low = low1, high = high0;
3374 }
3375
3376 else if (in0_p && ! in1_p)
3377 {
3378 /* If they don't overlap, the result is the first range. If they are
3379 equal, the result is false. If the second range is a subset of the
3380 first, and the ranges begin at the same place, we go from just after
3381 the end of the first range to the end of the second. If the second
3382 range is not a subset of the first, or if it is a subset and both
3383 ranges end at the same place, the range starts at the start of the
3384 first range and ends just before the second range.
3385 Otherwise, we can't describe this as a single range. */
3386 if (no_overlap)
3387 in_p = 1, low = low0, high = high0;
3388 else if (lowequal && highequal)
3389 in_p = 0, low = high = 0;
3390 else if (subset && lowequal)
3391 {
3392 in_p = 1, high = high0;
3393 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3394 integer_one_node, 0);
3395 }
3396 else if (! subset || highequal)
3397 {
3398 in_p = 1, low = low0;
3399 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3400 integer_one_node, 0);
3401 }
3402 else
3403 return 0;
3404 }
3405
3406 else if (! in0_p && in1_p)
3407 {
3408 /* If they don't overlap, the result is the second range. If the second
3409 is a subset of the first, the result is false. Otherwise,
3410 the range starts just after the first range and ends at the
3411 end of the second. */
3412 if (no_overlap)
3413 in_p = 1, low = low1, high = high1;
3414 else if (subset || highequal)
3415 in_p = 0, low = high = 0;
3416 else
3417 {
3418 in_p = 1, high = high1;
3419 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3420 integer_one_node, 0);
3421 }
3422 }
3423
3424 else
3425 {
3426 /* The case where we are excluding both ranges. Here the complex case
3427 is if they don't overlap. In that case, the only time we have a
3428 range is if they are adjacent. If the second is a subset of the
3429 first, the result is the first. Otherwise, the range to exclude
3430 starts at the beginning of the first range and ends at the end of the
3431 second. */
3432 if (no_overlap)
3433 {
3434 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3435 range_binop (PLUS_EXPR, NULL_TREE,
3436 high0, 1,
3437 integer_one_node, 1),
3438 1, low1, 0)))
3439 in_p = 0, low = low0, high = high1;
3440 else
3441 return 0;
3442 }
3443 else if (subset)
3444 in_p = 0, low = low0, high = high0;
3445 else
3446 in_p = 0, low = low0, high = high1;
3447 }
3448
3449 *pin_p = in_p, *plow = low, *phigh = high;
3450 return 1;
3451 }
3452 \f
3453 #ifndef RANGE_TEST_NON_SHORT_CIRCUIT
3454 #define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
3455 #endif
3456
3457 /* EXP is some logical combination of boolean tests. See if we can
3458 merge it into some range test. Return the new tree if so. */
3459
3460 static tree
3461 fold_range_test (tree exp)
3462 {
3463 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3464 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3465 int in0_p, in1_p, in_p;
3466 tree low0, low1, low, high0, high1, high;
3467 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3468 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3469 tree tem;
3470
3471 /* If this is an OR operation, invert both sides; we will invert
3472 again at the end. */
3473 if (or_op)
3474 in0_p = ! in0_p, in1_p = ! in1_p;
3475
3476 /* If both expressions are the same, if we can merge the ranges, and we
3477 can build the range test, return it or it inverted. If one of the
3478 ranges is always true or always false, consider it to be the same
3479 expression as the other. */
3480 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3481 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3482 in1_p, low1, high1)
3483 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3484 lhs != 0 ? lhs
3485 : rhs != 0 ? rhs : integer_zero_node,
3486 in_p, low, high))))
3487 return or_op ? invert_truthvalue (tem) : tem;
3488
3489 /* On machines where the branch cost is expensive, if this is a
3490 short-circuited branch and the underlying object on both sides
3491 is the same, make a non-short-circuit operation. */
3492 else if (RANGE_TEST_NON_SHORT_CIRCUIT
3493 && lhs != 0 && rhs != 0
3494 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3495 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3496 && operand_equal_p (lhs, rhs, 0))
3497 {
3498 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3499 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
3500 which cases we can't do this. */
3501 if (simple_operand_p (lhs))
3502 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3503 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3504 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3505 TREE_OPERAND (exp, 1));
3506
3507 else if ((*lang_hooks.decls.global_bindings_p) () == 0
3508 && ! CONTAINS_PLACEHOLDER_P (lhs))
3509 {
3510 tree common = save_expr (lhs);
3511
3512 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3513 or_op ? ! in0_p : in0_p,
3514 low0, high0))
3515 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3516 or_op ? ! in1_p : in1_p,
3517 low1, high1))))
3518 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3519 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3520 TREE_TYPE (exp), lhs, rhs);
3521 }
3522 }
3523
3524 return 0;
3525 }
3526 \f
3527 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3528 bit value. Arrange things so the extra bits will be set to zero if and
3529 only if C is signed-extended to its full width. If MASK is nonzero,
3530 it is an INTEGER_CST that should be AND'ed with the extra bits. */
3531
3532 static tree
3533 unextend (tree c, int p, int unsignedp, tree mask)
3534 {
3535 tree type = TREE_TYPE (c);
3536 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3537 tree temp;
3538
3539 if (p == modesize || unsignedp)
3540 return c;
3541
3542 /* We work by getting just the sign bit into the low-order bit, then
3543 into the high-order bit, then sign-extend. We then XOR that value
3544 with C. */
3545 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3546 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3547
3548 /* We must use a signed type in order to get an arithmetic right shift.
3549 However, we must also avoid introducing accidental overflows, so that
3550 a subsequent call to integer_zerop will work. Hence we must
3551 do the type conversion here. At this point, the constant is either
3552 zero or one, and the conversion to a signed type can never overflow.
3553 We could get an overflow if this conversion is done anywhere else. */
3554 if (TREE_UNSIGNED (type))
3555 temp = convert ((*lang_hooks.types.signed_type) (type), temp);
3556
3557 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3558 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3559 if (mask != 0)
3560 temp = const_binop (BIT_AND_EXPR, temp, convert (TREE_TYPE (c), mask), 0);
3561 /* If necessary, convert the type back to match the type of C. */
3562 if (TREE_UNSIGNED (type))
3563 temp = convert (type, temp);
3564
3565 return convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
3566 }
3567 \f
3568 /* Find ways of folding logical expressions of LHS and RHS:
3569 Try to merge two comparisons to the same innermost item.
3570 Look for range tests like "ch >= '0' && ch <= '9'".
3571 Look for combinations of simple terms on machines with expensive branches
3572 and evaluate the RHS unconditionally.
3573
3574 For example, if we have p->a == 2 && p->b == 4 and we can make an
3575 object large enough to span both A and B, we can do this with a comparison
3576 against the object ANDed with the a mask.
3577
3578 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3579 operations to do this with one comparison.
3580
3581 We check for both normal comparisons and the BIT_AND_EXPRs made this by
3582 function and the one above.
3583
3584 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3585 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3586
3587 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
3588 two operands.
3589
3590 We return the simplified tree or 0 if no optimization is possible. */
3591
3592 static tree
3593 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
3594 {
3595 /* If this is the "or" of two comparisons, we can do something if
3596 the comparisons are NE_EXPR. If this is the "and", we can do something
3597 if the comparisons are EQ_EXPR. I.e.,
3598 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3599
3600 WANTED_CODE is this operation code. For single bit fields, we can
3601 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3602 comparison for one-bit fields. */
3603
3604 enum tree_code wanted_code;
3605 enum tree_code lcode, rcode;
3606 tree ll_arg, lr_arg, rl_arg, rr_arg;
3607 tree ll_inner, lr_inner, rl_inner, rr_inner;
3608 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3609 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3610 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3611 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3612 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3613 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3614 enum machine_mode lnmode, rnmode;
3615 tree ll_mask, lr_mask, rl_mask, rr_mask;
3616 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3617 tree l_const, r_const;
3618 tree lntype, rntype, result;
3619 int first_bit, end_bit;
3620 int volatilep;
3621
3622 /* Start by getting the comparison codes. Fail if anything is volatile.
3623 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3624 it were surrounded with a NE_EXPR. */
3625
3626 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3627 return 0;
3628
3629 lcode = TREE_CODE (lhs);
3630 rcode = TREE_CODE (rhs);
3631
3632 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3633 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3634
3635 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3636 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3637
3638 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3639 return 0;
3640
3641 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3642 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3643
3644 ll_arg = TREE_OPERAND (lhs, 0);
3645 lr_arg = TREE_OPERAND (lhs, 1);
3646 rl_arg = TREE_OPERAND (rhs, 0);
3647 rr_arg = TREE_OPERAND (rhs, 1);
3648
3649 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
3650 if (simple_operand_p (ll_arg)
3651 && simple_operand_p (lr_arg)
3652 && !FLOAT_TYPE_P (TREE_TYPE (ll_arg)))
3653 {
3654 int compcode;
3655
3656 if (operand_equal_p (ll_arg, rl_arg, 0)
3657 && operand_equal_p (lr_arg, rr_arg, 0))
3658 {
3659 int lcompcode, rcompcode;
3660
3661 lcompcode = comparison_to_compcode (lcode);
3662 rcompcode = comparison_to_compcode (rcode);
3663 compcode = (code == TRUTH_AND_EXPR)
3664 ? lcompcode & rcompcode
3665 : lcompcode | rcompcode;
3666 }
3667 else if (operand_equal_p (ll_arg, rr_arg, 0)
3668 && operand_equal_p (lr_arg, rl_arg, 0))
3669 {
3670 int lcompcode, rcompcode;
3671
3672 rcode = swap_tree_comparison (rcode);
3673 lcompcode = comparison_to_compcode (lcode);
3674 rcompcode = comparison_to_compcode (rcode);
3675 compcode = (code == TRUTH_AND_EXPR)
3676 ? lcompcode & rcompcode
3677 : lcompcode | rcompcode;
3678 }
3679 else
3680 compcode = -1;
3681
3682 if (compcode == COMPCODE_TRUE)
3683 return convert (truth_type, integer_one_node);
3684 else if (compcode == COMPCODE_FALSE)
3685 return convert (truth_type, integer_zero_node);
3686 else if (compcode != -1)
3687 return build (compcode_to_comparison (compcode),
3688 truth_type, ll_arg, lr_arg);
3689 }
3690
3691 /* If the RHS can be evaluated unconditionally and its operands are
3692 simple, it wins to evaluate the RHS unconditionally on machines
3693 with expensive branches. In this case, this isn't a comparison
3694 that can be merged. Avoid doing this if the RHS is a floating-point
3695 comparison since those can trap. */
3696
3697 if (BRANCH_COST >= 2
3698 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
3699 && simple_operand_p (rl_arg)
3700 && simple_operand_p (rr_arg))
3701 {
3702 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
3703 if (code == TRUTH_OR_EXPR
3704 && lcode == NE_EXPR && integer_zerop (lr_arg)
3705 && rcode == NE_EXPR && integer_zerop (rr_arg)
3706 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3707 return build (NE_EXPR, truth_type,
3708 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3709 ll_arg, rl_arg),
3710 integer_zero_node);
3711
3712 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
3713 if (code == TRUTH_AND_EXPR
3714 && lcode == EQ_EXPR && integer_zerop (lr_arg)
3715 && rcode == EQ_EXPR && integer_zerop (rr_arg)
3716 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3717 return build (EQ_EXPR, truth_type,
3718 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3719 ll_arg, rl_arg),
3720 integer_zero_node);
3721
3722 return build (code, truth_type, lhs, rhs);
3723 }
3724
3725 /* See if the comparisons can be merged. Then get all the parameters for
3726 each side. */
3727
3728 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
3729 || (rcode != EQ_EXPR && rcode != NE_EXPR))
3730 return 0;
3731
3732 volatilep = 0;
3733 ll_inner = decode_field_reference (ll_arg,
3734 &ll_bitsize, &ll_bitpos, &ll_mode,
3735 &ll_unsignedp, &volatilep, &ll_mask,
3736 &ll_and_mask);
3737 lr_inner = decode_field_reference (lr_arg,
3738 &lr_bitsize, &lr_bitpos, &lr_mode,
3739 &lr_unsignedp, &volatilep, &lr_mask,
3740 &lr_and_mask);
3741 rl_inner = decode_field_reference (rl_arg,
3742 &rl_bitsize, &rl_bitpos, &rl_mode,
3743 &rl_unsignedp, &volatilep, &rl_mask,
3744 &rl_and_mask);
3745 rr_inner = decode_field_reference (rr_arg,
3746 &rr_bitsize, &rr_bitpos, &rr_mode,
3747 &rr_unsignedp, &volatilep, &rr_mask,
3748 &rr_and_mask);
3749
3750 /* It must be true that the inner operation on the lhs of each
3751 comparison must be the same if we are to be able to do anything.
3752 Then see if we have constants. If not, the same must be true for
3753 the rhs's. */
3754 if (volatilep || ll_inner == 0 || rl_inner == 0
3755 || ! operand_equal_p (ll_inner, rl_inner, 0))
3756 return 0;
3757
3758 if (TREE_CODE (lr_arg) == INTEGER_CST
3759 && TREE_CODE (rr_arg) == INTEGER_CST)
3760 l_const = lr_arg, r_const = rr_arg;
3761 else if (lr_inner == 0 || rr_inner == 0
3762 || ! operand_equal_p (lr_inner, rr_inner, 0))
3763 return 0;
3764 else
3765 l_const = r_const = 0;
3766
3767 /* If either comparison code is not correct for our logical operation,
3768 fail. However, we can convert a one-bit comparison against zero into
3769 the opposite comparison against that bit being set in the field. */
3770
3771 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
3772 if (lcode != wanted_code)
3773 {
3774 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
3775 {
3776 /* Make the left operand unsigned, since we are only interested
3777 in the value of one bit. Otherwise we are doing the wrong
3778 thing below. */
3779 ll_unsignedp = 1;
3780 l_const = ll_mask;
3781 }
3782 else
3783 return 0;
3784 }
3785
3786 /* This is analogous to the code for l_const above. */
3787 if (rcode != wanted_code)
3788 {
3789 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
3790 {
3791 rl_unsignedp = 1;
3792 r_const = rl_mask;
3793 }
3794 else
3795 return 0;
3796 }
3797
3798 /* After this point all optimizations will generate bit-field
3799 references, which we might not want. */
3800 if (! (*lang_hooks.can_use_bit_fields_p) ())
3801 return 0;
3802
3803 /* See if we can find a mode that contains both fields being compared on
3804 the left. If we can't, fail. Otherwise, update all constants and masks
3805 to be relative to a field of that size. */
3806 first_bit = MIN (ll_bitpos, rl_bitpos);
3807 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
3808 lnmode = get_best_mode (end_bit - first_bit, first_bit,
3809 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
3810 volatilep);
3811 if (lnmode == VOIDmode)
3812 return 0;
3813
3814 lnbitsize = GET_MODE_BITSIZE (lnmode);
3815 lnbitpos = first_bit & ~ (lnbitsize - 1);
3816 lntype = (*lang_hooks.types.type_for_size) (lnbitsize, 1);
3817 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
3818
3819 if (BYTES_BIG_ENDIAN)
3820 {
3821 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
3822 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
3823 }
3824
3825 ll_mask = const_binop (LSHIFT_EXPR, convert (lntype, ll_mask),
3826 size_int (xll_bitpos), 0);
3827 rl_mask = const_binop (LSHIFT_EXPR, convert (lntype, rl_mask),
3828 size_int (xrl_bitpos), 0);
3829
3830 if (l_const)
3831 {
3832 l_const = convert (lntype, l_const);
3833 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
3834 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
3835 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
3836 fold (build1 (BIT_NOT_EXPR,
3837 lntype, ll_mask)),
3838 0)))
3839 {
3840 warning ("comparison is always %d", wanted_code == NE_EXPR);
3841
3842 return convert (truth_type,
3843 wanted_code == NE_EXPR
3844 ? integer_one_node : integer_zero_node);
3845 }
3846 }
3847 if (r_const)
3848 {
3849 r_const = convert (lntype, r_const);
3850 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
3851 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
3852 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
3853 fold (build1 (BIT_NOT_EXPR,
3854 lntype, rl_mask)),
3855 0)))
3856 {
3857 warning ("comparison is always %d", wanted_code == NE_EXPR);
3858
3859 return convert (truth_type,
3860 wanted_code == NE_EXPR
3861 ? integer_one_node : integer_zero_node);
3862 }
3863 }
3864
3865 /* If the right sides are not constant, do the same for it. Also,
3866 disallow this optimization if a size or signedness mismatch occurs
3867 between the left and right sides. */
3868 if (l_const == 0)
3869 {
3870 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
3871 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
3872 /* Make sure the two fields on the right
3873 correspond to the left without being swapped. */
3874 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
3875 return 0;
3876
3877 first_bit = MIN (lr_bitpos, rr_bitpos);
3878 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
3879 rnmode = get_best_mode (end_bit - first_bit, first_bit,
3880 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
3881 volatilep);
3882 if (rnmode == VOIDmode)
3883 return 0;
3884
3885 rnbitsize = GET_MODE_BITSIZE (rnmode);
3886 rnbitpos = first_bit & ~ (rnbitsize - 1);
3887 rntype = (*lang_hooks.types.type_for_size) (rnbitsize, 1);
3888 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
3889
3890 if (BYTES_BIG_ENDIAN)
3891 {
3892 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
3893 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
3894 }
3895
3896 lr_mask = const_binop (LSHIFT_EXPR, convert (rntype, lr_mask),
3897 size_int (xlr_bitpos), 0);
3898 rr_mask = const_binop (LSHIFT_EXPR, convert (rntype, rr_mask),
3899 size_int (xrr_bitpos), 0);
3900
3901 /* Make a mask that corresponds to both fields being compared.
3902 Do this for both items being compared. If the operands are the
3903 same size and the bits being compared are in the same position
3904 then we can do this by masking both and comparing the masked
3905 results. */
3906 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3907 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
3908 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
3909 {
3910 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
3911 ll_unsignedp || rl_unsignedp);
3912 if (! all_ones_mask_p (ll_mask, lnbitsize))
3913 lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
3914
3915 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
3916 lr_unsignedp || rr_unsignedp);
3917 if (! all_ones_mask_p (lr_mask, rnbitsize))
3918 rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
3919
3920 return build (wanted_code, truth_type, lhs, rhs);
3921 }
3922
3923 /* There is still another way we can do something: If both pairs of
3924 fields being compared are adjacent, we may be able to make a wider
3925 field containing them both.
3926
3927 Note that we still must mask the lhs/rhs expressions. Furthermore,
3928 the mask must be shifted to account for the shift done by
3929 make_bit_field_ref. */
3930 if ((ll_bitsize + ll_bitpos == rl_bitpos
3931 && lr_bitsize + lr_bitpos == rr_bitpos)
3932 || (ll_bitpos == rl_bitpos + rl_bitsize
3933 && lr_bitpos == rr_bitpos + rr_bitsize))
3934 {
3935 tree type;
3936
3937 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
3938 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
3939 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
3940 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
3941
3942 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
3943 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
3944 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
3945 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
3946
3947 /* Convert to the smaller type before masking out unwanted bits. */
3948 type = lntype;
3949 if (lntype != rntype)
3950 {
3951 if (lnbitsize > rnbitsize)
3952 {
3953 lhs = convert (rntype, lhs);
3954 ll_mask = convert (rntype, ll_mask);
3955 type = rntype;
3956 }
3957 else if (lnbitsize < rnbitsize)
3958 {
3959 rhs = convert (lntype, rhs);
3960 lr_mask = convert (lntype, lr_mask);
3961 type = lntype;
3962 }
3963 }
3964
3965 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
3966 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
3967
3968 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
3969 rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
3970
3971 return build (wanted_code, truth_type, lhs, rhs);
3972 }
3973
3974 return 0;
3975 }
3976
3977 /* Handle the case of comparisons with constants. If there is something in
3978 common between the masks, those bits of the constants must be the same.
3979 If not, the condition is always false. Test for this to avoid generating
3980 incorrect code below. */
3981 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
3982 if (! integer_zerop (result)
3983 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
3984 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
3985 {
3986 if (wanted_code == NE_EXPR)
3987 {
3988 warning ("`or' of unmatched not-equal tests is always 1");
3989 return convert (truth_type, integer_one_node);
3990 }
3991 else
3992 {
3993 warning ("`and' of mutually exclusive equal-tests is always 0");
3994 return convert (truth_type, integer_zero_node);
3995 }
3996 }
3997
3998 /* Construct the expression we will return. First get the component
3999 reference we will make. Unless the mask is all ones the width of
4000 that field, perform the mask operation. Then compare with the
4001 merged constant. */
4002 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4003 ll_unsignedp || rl_unsignedp);
4004
4005 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4006 if (! all_ones_mask_p (ll_mask, lnbitsize))
4007 result = build (BIT_AND_EXPR, lntype, result, ll_mask);
4008
4009 return build (wanted_code, truth_type, result,
4010 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4011 }
4012 \f
4013 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4014 constant. */
4015
4016 static tree
4017 optimize_minmax_comparison (tree t)
4018 {
4019 tree type = TREE_TYPE (t);
4020 tree arg0 = TREE_OPERAND (t, 0);
4021 enum tree_code op_code;
4022 tree comp_const = TREE_OPERAND (t, 1);
4023 tree minmax_const;
4024 int consts_equal, consts_lt;
4025 tree inner;
4026
4027 STRIP_SIGN_NOPS (arg0);
4028
4029 op_code = TREE_CODE (arg0);
4030 minmax_const = TREE_OPERAND (arg0, 1);
4031 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4032 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4033 inner = TREE_OPERAND (arg0, 0);
4034
4035 /* If something does not permit us to optimize, return the original tree. */
4036 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4037 || TREE_CODE (comp_const) != INTEGER_CST
4038 || TREE_CONSTANT_OVERFLOW (comp_const)
4039 || TREE_CODE (minmax_const) != INTEGER_CST
4040 || TREE_CONSTANT_OVERFLOW (minmax_const))
4041 return t;
4042
4043 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4044 and GT_EXPR, doing the rest with recursive calls using logical
4045 simplifications. */
4046 switch (TREE_CODE (t))
4047 {
4048 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4049 return
4050 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4051
4052 case GE_EXPR:
4053 return
4054 fold (build (TRUTH_ORIF_EXPR, type,
4055 optimize_minmax_comparison
4056 (build (EQ_EXPR, type, arg0, comp_const)),
4057 optimize_minmax_comparison
4058 (build (GT_EXPR, type, arg0, comp_const))));
4059
4060 case EQ_EXPR:
4061 if (op_code == MAX_EXPR && consts_equal)
4062 /* MAX (X, 0) == 0 -> X <= 0 */
4063 return fold (build (LE_EXPR, type, inner, comp_const));
4064
4065 else if (op_code == MAX_EXPR && consts_lt)
4066 /* MAX (X, 0) == 5 -> X == 5 */
4067 return fold (build (EQ_EXPR, type, inner, comp_const));
4068
4069 else if (op_code == MAX_EXPR)
4070 /* MAX (X, 0) == -1 -> false */
4071 return omit_one_operand (type, integer_zero_node, inner);
4072
4073 else if (consts_equal)
4074 /* MIN (X, 0) == 0 -> X >= 0 */
4075 return fold (build (GE_EXPR, type, inner, comp_const));
4076
4077 else if (consts_lt)
4078 /* MIN (X, 0) == 5 -> false */
4079 return omit_one_operand (type, integer_zero_node, inner);
4080
4081 else
4082 /* MIN (X, 0) == -1 -> X == -1 */
4083 return fold (build (EQ_EXPR, type, inner, comp_const));
4084
4085 case GT_EXPR:
4086 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4087 /* MAX (X, 0) > 0 -> X > 0
4088 MAX (X, 0) > 5 -> X > 5 */
4089 return fold (build (GT_EXPR, type, inner, comp_const));
4090
4091 else if (op_code == MAX_EXPR)
4092 /* MAX (X, 0) > -1 -> true */
4093 return omit_one_operand (type, integer_one_node, inner);
4094
4095 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4096 /* MIN (X, 0) > 0 -> false
4097 MIN (X, 0) > 5 -> false */
4098 return omit_one_operand (type, integer_zero_node, inner);
4099
4100 else
4101 /* MIN (X, 0) > -1 -> X > -1 */
4102 return fold (build (GT_EXPR, type, inner, comp_const));
4103
4104 default:
4105 return t;
4106 }
4107 }
4108 \f
4109 /* T is an integer expression that is being multiplied, divided, or taken a
4110 modulus (CODE says which and what kind of divide or modulus) by a
4111 constant C. See if we can eliminate that operation by folding it with
4112 other operations already in T. WIDE_TYPE, if non-null, is a type that
4113 should be used for the computation if wider than our type.
4114
4115 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4116 (X * 2) + (Y * 4). We must, however, be assured that either the original
4117 expression would not overflow or that overflow is undefined for the type
4118 in the language in question.
4119
4120 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4121 the machine has a multiply-accumulate insn or that this is part of an
4122 addressing calculation.
4123
4124 If we return a non-null expression, it is an equivalent form of the
4125 original computation, but need not be in the original type. */
4126
4127 static tree
4128 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
4129 {
4130 /* To avoid exponential search depth, refuse to allow recursion past
4131 three levels. Beyond that (1) it's highly unlikely that we'll find
4132 something interesting and (2) we've probably processed it before
4133 when we built the inner expression. */
4134
4135 static int depth;
4136 tree ret;
4137
4138 if (depth > 3)
4139 return NULL;
4140
4141 depth++;
4142 ret = extract_muldiv_1 (t, c, code, wide_type);
4143 depth--;
4144
4145 return ret;
4146 }
4147
4148 static tree
4149 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
4150 {
4151 tree type = TREE_TYPE (t);
4152 enum tree_code tcode = TREE_CODE (t);
4153 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
4154 > GET_MODE_SIZE (TYPE_MODE (type)))
4155 ? wide_type : type);
4156 tree t1, t2;
4157 int same_p = tcode == code;
4158 tree op0 = NULL_TREE, op1 = NULL_TREE;
4159
4160 /* Don't deal with constants of zero here; they confuse the code below. */
4161 if (integer_zerop (c))
4162 return NULL_TREE;
4163
4164 if (TREE_CODE_CLASS (tcode) == '1')
4165 op0 = TREE_OPERAND (t, 0);
4166
4167 if (TREE_CODE_CLASS (tcode) == '2')
4168 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
4169
4170 /* Note that we need not handle conditional operations here since fold
4171 already handles those cases. So just do arithmetic here. */
4172 switch (tcode)
4173 {
4174 case INTEGER_CST:
4175 /* For a constant, we can always simplify if we are a multiply
4176 or (for divide and modulus) if it is a multiple of our constant. */
4177 if (code == MULT_EXPR
4178 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
4179 return const_binop (code, convert (ctype, t), convert (ctype, c), 0);
4180 break;
4181
4182 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
4183 /* If op0 is an expression ... */
4184 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
4185 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
4186 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
4187 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
4188 /* ... and is unsigned, and its type is smaller than ctype,
4189 then we cannot pass through as widening. */
4190 && ((TREE_UNSIGNED (TREE_TYPE (op0))
4191 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4192 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
4193 && (GET_MODE_SIZE (TYPE_MODE (ctype))
4194 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
4195 /* ... or its type is larger than ctype,
4196 then we cannot pass through this truncation. */
4197 || (GET_MODE_SIZE (TYPE_MODE (ctype))
4198 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
4199 /* ... or signedness changes for division or modulus,
4200 then we cannot pass through this conversion. */
4201 || (code != MULT_EXPR
4202 && (TREE_UNSIGNED (ctype)
4203 != TREE_UNSIGNED (TREE_TYPE (op0))))))
4204 break;
4205
4206 /* Pass the constant down and see if we can make a simplification. If
4207 we can, replace this expression with the inner simplification for
4208 possible later conversion to our or some other type. */
4209 if ((t2 = convert (TREE_TYPE (op0), c)) != 0
4210 && TREE_CODE (t2) == INTEGER_CST
4211 && ! TREE_CONSTANT_OVERFLOW (t2)
4212 && (0 != (t1 = extract_muldiv (op0, t2, code,
4213 code == MULT_EXPR
4214 ? ctype : NULL_TREE))))
4215 return t1;
4216 break;
4217
4218 case NEGATE_EXPR: case ABS_EXPR:
4219 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4220 return fold (build1 (tcode, ctype, convert (ctype, t1)));
4221 break;
4222
4223 case MIN_EXPR: case MAX_EXPR:
4224 /* If widening the type changes the signedness, then we can't perform
4225 this optimization as that changes the result. */
4226 if (TREE_UNSIGNED (ctype) != TREE_UNSIGNED (type))
4227 break;
4228
4229 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
4230 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
4231 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
4232 {
4233 if (tree_int_cst_sgn (c) < 0)
4234 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
4235
4236 return fold (build (tcode, ctype, convert (ctype, t1),
4237 convert (ctype, t2)));
4238 }
4239 break;
4240
4241 case WITH_RECORD_EXPR:
4242 if ((t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code, wide_type)) != 0)
4243 return build (WITH_RECORD_EXPR, TREE_TYPE (t1), t1,
4244 TREE_OPERAND (t, 1));
4245 break;
4246
4247 case LSHIFT_EXPR: case RSHIFT_EXPR:
4248 /* If the second operand is constant, this is a multiplication
4249 or floor division, by a power of two, so we can treat it that
4250 way unless the multiplier or divisor overflows. */
4251 if (TREE_CODE (op1) == INTEGER_CST
4252 /* const_binop may not detect overflow correctly,
4253 so check for it explicitly here. */
4254 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
4255 && TREE_INT_CST_HIGH (op1) == 0
4256 && 0 != (t1 = convert (ctype,
4257 const_binop (LSHIFT_EXPR, size_one_node,
4258 op1, 0)))
4259 && ! TREE_OVERFLOW (t1))
4260 return extract_muldiv (build (tcode == LSHIFT_EXPR
4261 ? MULT_EXPR : FLOOR_DIV_EXPR,
4262 ctype, convert (ctype, op0), t1),
4263 c, code, wide_type);
4264 break;
4265
4266 case PLUS_EXPR: case MINUS_EXPR:
4267 /* See if we can eliminate the operation on both sides. If we can, we
4268 can return a new PLUS or MINUS. If we can't, the only remaining
4269 cases where we can do anything are if the second operand is a
4270 constant. */
4271 t1 = extract_muldiv (op0, c, code, wide_type);
4272 t2 = extract_muldiv (op1, c, code, wide_type);
4273 if (t1 != 0 && t2 != 0
4274 && (code == MULT_EXPR
4275 /* If not multiplication, we can only do this if both operands
4276 are divisible by c. */
4277 || (multiple_of_p (ctype, op0, c)
4278 && multiple_of_p (ctype, op1, c))))
4279 return fold (build (tcode, ctype, convert (ctype, t1),
4280 convert (ctype, t2)));
4281
4282 /* If this was a subtraction, negate OP1 and set it to be an addition.
4283 This simplifies the logic below. */
4284 if (tcode == MINUS_EXPR)
4285 tcode = PLUS_EXPR, op1 = negate_expr (op1);
4286
4287 if (TREE_CODE (op1) != INTEGER_CST)
4288 break;
4289
4290 /* If either OP1 or C are negative, this optimization is not safe for
4291 some of the division and remainder types while for others we need
4292 to change the code. */
4293 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
4294 {
4295 if (code == CEIL_DIV_EXPR)
4296 code = FLOOR_DIV_EXPR;
4297 else if (code == FLOOR_DIV_EXPR)
4298 code = CEIL_DIV_EXPR;
4299 else if (code != MULT_EXPR
4300 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
4301 break;
4302 }
4303
4304 /* If it's a multiply or a division/modulus operation of a multiple
4305 of our constant, do the operation and verify it doesn't overflow. */
4306 if (code == MULT_EXPR
4307 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4308 {
4309 op1 = const_binop (code, convert (ctype, op1), convert (ctype, c), 0);
4310 if (op1 == 0 || TREE_OVERFLOW (op1))
4311 break;
4312 }
4313 else
4314 break;
4315
4316 /* If we have an unsigned type is not a sizetype, we cannot widen
4317 the operation since it will change the result if the original
4318 computation overflowed. */
4319 if (TREE_UNSIGNED (ctype)
4320 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
4321 && ctype != type)
4322 break;
4323
4324 /* If we were able to eliminate our operation from the first side,
4325 apply our operation to the second side and reform the PLUS. */
4326 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4327 return fold (build (tcode, ctype, convert (ctype, t1), op1));
4328
4329 /* The last case is if we are a multiply. In that case, we can
4330 apply the distributive law to commute the multiply and addition
4331 if the multiplication of the constants doesn't overflow. */
4332 if (code == MULT_EXPR)
4333 return fold (build (tcode, ctype, fold (build (code, ctype,
4334 convert (ctype, op0),
4335 convert (ctype, c))),
4336 op1));
4337
4338 break;
4339
4340 case MULT_EXPR:
4341 /* We have a special case here if we are doing something like
4342 (C * 8) % 4 since we know that's zero. */
4343 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4344 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4345 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4346 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4347 return omit_one_operand (type, integer_zero_node, op0);
4348
4349 /* ... fall through ... */
4350
4351 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
4352 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
4353 /* If we can extract our operation from the LHS, do so and return a
4354 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
4355 do something only if the second operand is a constant. */
4356 if (same_p
4357 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4358 return fold (build (tcode, ctype, convert (ctype, t1),
4359 convert (ctype, op1)));
4360 else if (tcode == MULT_EXPR && code == MULT_EXPR
4361 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4362 return fold (build (tcode, ctype, convert (ctype, op0),
4363 convert (ctype, t1)));
4364 else if (TREE_CODE (op1) != INTEGER_CST)
4365 return 0;
4366
4367 /* If these are the same operation types, we can associate them
4368 assuming no overflow. */
4369 if (tcode == code
4370 && 0 != (t1 = const_binop (MULT_EXPR, convert (ctype, op1),
4371 convert (ctype, c), 0))
4372 && ! TREE_OVERFLOW (t1))
4373 return fold (build (tcode, ctype, convert (ctype, op0), t1));
4374
4375 /* If these operations "cancel" each other, we have the main
4376 optimizations of this pass, which occur when either constant is a
4377 multiple of the other, in which case we replace this with either an
4378 operation or CODE or TCODE.
4379
4380 If we have an unsigned type that is not a sizetype, we cannot do
4381 this since it will change the result if the original computation
4382 overflowed. */
4383 if ((! TREE_UNSIGNED (ctype)
4384 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
4385 && ! flag_wrapv
4386 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4387 || (tcode == MULT_EXPR
4388 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4389 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
4390 {
4391 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4392 return fold (build (tcode, ctype, convert (ctype, op0),
4393 convert (ctype,
4394 const_binop (TRUNC_DIV_EXPR,
4395 op1, c, 0))));
4396 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4397 return fold (build (code, ctype, convert (ctype, op0),
4398 convert (ctype,
4399 const_binop (TRUNC_DIV_EXPR,
4400 c, op1, 0))));
4401 }
4402 break;
4403
4404 default:
4405 break;
4406 }
4407
4408 return 0;
4409 }
4410 \f
4411 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4412 S, a SAVE_EXPR, return the expression actually being evaluated. Note
4413 that we may sometimes modify the tree. */
4414
4415 static tree
4416 strip_compound_expr (tree t, tree s)
4417 {
4418 enum tree_code code = TREE_CODE (t);
4419
4420 /* See if this is the COMPOUND_EXPR we want to eliminate. */
4421 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4422 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4423 return TREE_OPERAND (t, 1);
4424
4425 /* See if this is a COND_EXPR or a simple arithmetic operator. We
4426 don't bother handling any other types. */
4427 else if (code == COND_EXPR)
4428 {
4429 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4430 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4431 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
4432 }
4433 else if (TREE_CODE_CLASS (code) == '1')
4434 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4435 else if (TREE_CODE_CLASS (code) == '<'
4436 || TREE_CODE_CLASS (code) == '2')
4437 {
4438 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4439 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4440 }
4441
4442 return t;
4443 }
4444 \f
4445 /* Return a node which has the indicated constant VALUE (either 0 or
4446 1), and is of the indicated TYPE. */
4447
4448 static tree
4449 constant_boolean_node (int value, tree type)
4450 {
4451 if (type == integer_type_node)
4452 return value ? integer_one_node : integer_zero_node;
4453 else if (TREE_CODE (type) == BOOLEAN_TYPE)
4454 return (*lang_hooks.truthvalue_conversion) (value ? integer_one_node :
4455 integer_zero_node);
4456 else
4457 {
4458 tree t = build_int_2 (value, 0);
4459
4460 TREE_TYPE (t) = type;
4461 return t;
4462 }
4463 }
4464
4465 /* Utility function for the following routine, to see how complex a nesting of
4466 COND_EXPRs can be. EXPR is the expression and LIMIT is a count beyond which
4467 we don't care (to avoid spending too much time on complex expressions.). */
4468
4469 static int
4470 count_cond (tree expr, int lim)
4471 {
4472 int ctrue, cfalse;
4473
4474 if (TREE_CODE (expr) != COND_EXPR)
4475 return 0;
4476 else if (lim <= 0)
4477 return 0;
4478
4479 ctrue = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4480 cfalse = count_cond (TREE_OPERAND (expr, 2), lim - 1 - ctrue);
4481 return MIN (lim, 1 + ctrue + cfalse);
4482 }
4483
4484 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
4485 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
4486 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
4487 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
4488 COND is the first argument to CODE; otherwise (as in the example
4489 given here), it is the second argument. TYPE is the type of the
4490 original expression. */
4491
4492 static tree
4493 fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
4494 tree cond, tree arg, int cond_first_p)
4495 {
4496 tree test, true_value, false_value;
4497 tree lhs = NULL_TREE;
4498 tree rhs = NULL_TREE;
4499 /* In the end, we'll produce a COND_EXPR. Both arms of the
4500 conditional expression will be binary operations. The left-hand
4501 side of the expression to be executed if the condition is true
4502 will be pointed to by TRUE_LHS. Similarly, the right-hand side
4503 of the expression to be executed if the condition is true will be
4504 pointed to by TRUE_RHS. FALSE_LHS and FALSE_RHS are analogous --
4505 but apply to the expression to be executed if the conditional is
4506 false. */
4507 tree *true_lhs;
4508 tree *true_rhs;
4509 tree *false_lhs;
4510 tree *false_rhs;
4511 /* These are the codes to use for the left-hand side and right-hand
4512 side of the COND_EXPR. Normally, they are the same as CODE. */
4513 enum tree_code lhs_code = code;
4514 enum tree_code rhs_code = code;
4515 /* And these are the types of the expressions. */
4516 tree lhs_type = type;
4517 tree rhs_type = type;
4518 int save = 0;
4519
4520 if (cond_first_p)
4521 {
4522 true_rhs = false_rhs = &arg;
4523 true_lhs = &true_value;
4524 false_lhs = &false_value;
4525 }
4526 else
4527 {
4528 true_lhs = false_lhs = &arg;
4529 true_rhs = &true_value;
4530 false_rhs = &false_value;
4531 }
4532
4533 if (TREE_CODE (cond) == COND_EXPR)
4534 {
4535 test = TREE_OPERAND (cond, 0);
4536 true_value = TREE_OPERAND (cond, 1);
4537 false_value = TREE_OPERAND (cond, 2);
4538 /* If this operand throws an expression, then it does not make
4539 sense to try to perform a logical or arithmetic operation
4540 involving it. Instead of building `a + throw 3' for example,
4541 we simply build `a, throw 3'. */
4542 if (VOID_TYPE_P (TREE_TYPE (true_value)))
4543 {
4544 if (! cond_first_p)
4545 {
4546 lhs_code = COMPOUND_EXPR;
4547 lhs_type = void_type_node;
4548 }
4549 else
4550 lhs = true_value;
4551 }
4552 if (VOID_TYPE_P (TREE_TYPE (false_value)))
4553 {
4554 if (! cond_first_p)
4555 {
4556 rhs_code = COMPOUND_EXPR;
4557 rhs_type = void_type_node;
4558 }
4559 else
4560 rhs = false_value;
4561 }
4562 }
4563 else
4564 {
4565 tree testtype = TREE_TYPE (cond);
4566 test = cond;
4567 true_value = convert (testtype, integer_one_node);
4568 false_value = convert (testtype, integer_zero_node);
4569 }
4570
4571 /* If ARG is complex we want to make sure we only evaluate it once. Though
4572 this is only required if it is volatile, it might be more efficient even
4573 if it is not. However, if we succeed in folding one part to a constant,
4574 we do not need to make this SAVE_EXPR. Since we do this optimization
4575 primarily to see if we do end up with constant and this SAVE_EXPR
4576 interferes with later optimizations, suppressing it when we can is
4577 important.
4578
4579 If we are not in a function, we can't make a SAVE_EXPR, so don't try to
4580 do so. Don't try to see if the result is a constant if an arm is a
4581 COND_EXPR since we get exponential behavior in that case. */
4582
4583 if (saved_expr_p (arg))
4584 save = 1;
4585 else if (lhs == 0 && rhs == 0
4586 && !TREE_CONSTANT (arg)
4587 && (*lang_hooks.decls.global_bindings_p) () == 0
4588 && ((TREE_CODE (arg) != VAR_DECL && TREE_CODE (arg) != PARM_DECL)
4589 || TREE_SIDE_EFFECTS (arg)))
4590 {
4591 if (TREE_CODE (true_value) != COND_EXPR)
4592 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4593
4594 if (TREE_CODE (false_value) != COND_EXPR)
4595 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4596
4597 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4598 && (rhs == 0 || !TREE_CONSTANT (rhs)))
4599 {
4600 arg = save_expr (arg);
4601 lhs = rhs = 0;
4602 save = 1;
4603 }
4604 }
4605
4606 if (lhs == 0)
4607 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4608 if (rhs == 0)
4609 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4610
4611 test = fold (build (COND_EXPR, type, test, lhs, rhs));
4612
4613 if (save)
4614 return build (COMPOUND_EXPR, type,
4615 convert (void_type_node, arg),
4616 strip_compound_expr (test, arg));
4617 else
4618 return convert (type, test);
4619 }
4620
4621 \f
4622 /* Subroutine of fold() that checks for the addition of +/- 0.0.
4623
4624 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
4625 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
4626 ADDEND is the same as X.
4627
4628 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
4629 and finite. The problematic cases are when X is zero, and its mode
4630 has signed zeros. In the case of rounding towards -infinity,
4631 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
4632 modes, X + 0 is not the same as X because -0 + 0 is 0. */
4633
4634 static bool
4635 fold_real_zero_addition_p (tree type, tree addend, int negate)
4636 {
4637 if (!real_zerop (addend))
4638 return false;
4639
4640 /* Don't allow the fold with -fsignaling-nans. */
4641 if (HONOR_SNANS (TYPE_MODE (type)))
4642 return false;
4643
4644 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
4645 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
4646 return true;
4647
4648 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
4649 if (TREE_CODE (addend) == REAL_CST
4650 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
4651 negate = !negate;
4652
4653 /* The mode has signed zeros, and we have to honor their sign.
4654 In this situation, there is only one case we can return true for.
4655 X - 0 is the same as X unless rounding towards -infinity is
4656 supported. */
4657 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
4658 }
4659
4660 /* Subroutine of fold() that checks comparisons of built-in math
4661 functions against real constants.
4662
4663 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
4664 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
4665 is the type of the result and ARG0 and ARG1 are the operands of the
4666 comparison. ARG1 must be a TREE_REAL_CST.
4667
4668 The function returns the constant folded tree if a simplification
4669 can be made, and NULL_TREE otherwise. */
4670
4671 static tree
4672 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
4673 tree type, tree arg0, tree arg1)
4674 {
4675 REAL_VALUE_TYPE c;
4676
4677 if (fcode == BUILT_IN_SQRT
4678 || fcode == BUILT_IN_SQRTF
4679 || fcode == BUILT_IN_SQRTL)
4680 {
4681 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
4682 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
4683
4684 c = TREE_REAL_CST (arg1);
4685 if (REAL_VALUE_NEGATIVE (c))
4686 {
4687 /* sqrt(x) < y is always false, if y is negative. */
4688 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
4689 return omit_one_operand (type,
4690 convert (type, integer_zero_node),
4691 arg);
4692
4693 /* sqrt(x) > y is always true, if y is negative and we
4694 don't care about NaNs, i.e. negative values of x. */
4695 if (code == NE_EXPR || !HONOR_NANS (mode))
4696 return omit_one_operand (type,
4697 convert (type, integer_one_node),
4698 arg);
4699
4700 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
4701 return fold (build (GE_EXPR, type, arg,
4702 build_real (TREE_TYPE (arg), dconst0)));
4703 }
4704 else if (code == GT_EXPR || code == GE_EXPR)
4705 {
4706 REAL_VALUE_TYPE c2;
4707
4708 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
4709 real_convert (&c2, mode, &c2);
4710
4711 if (REAL_VALUE_ISINF (c2))
4712 {
4713 /* sqrt(x) > y is x == +Inf, when y is very large. */
4714 if (HONOR_INFINITIES (mode))
4715 return fold (build (EQ_EXPR, type, arg,
4716 build_real (TREE_TYPE (arg), c2)));
4717
4718 /* sqrt(x) > y is always false, when y is very large
4719 and we don't care about infinities. */
4720 return omit_one_operand (type,
4721 convert (type, integer_zero_node),
4722 arg);
4723 }
4724
4725 /* sqrt(x) > c is the same as x > c*c. */
4726 return fold (build (code, type, arg,
4727 build_real (TREE_TYPE (arg), c2)));
4728 }
4729 else if (code == LT_EXPR || code == LE_EXPR)
4730 {
4731 REAL_VALUE_TYPE c2;
4732
4733 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
4734 real_convert (&c2, mode, &c2);
4735
4736 if (REAL_VALUE_ISINF (c2))
4737 {
4738 /* sqrt(x) < y is always true, when y is a very large
4739 value and we don't care about NaNs or Infinities. */
4740 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
4741 return omit_one_operand (type,
4742 convert (type, integer_one_node),
4743 arg);
4744
4745 /* sqrt(x) < y is x != +Inf when y is very large and we
4746 don't care about NaNs. */
4747 if (! HONOR_NANS (mode))
4748 return fold (build (NE_EXPR, type, arg,
4749 build_real (TREE_TYPE (arg), c2)));
4750
4751 /* sqrt(x) < y is x >= 0 when y is very large and we
4752 don't care about Infinities. */
4753 if (! HONOR_INFINITIES (mode))
4754 return fold (build (GE_EXPR, type, arg,
4755 build_real (TREE_TYPE (arg), dconst0)));
4756
4757 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
4758 if ((*lang_hooks.decls.global_bindings_p) () != 0
4759 || CONTAINS_PLACEHOLDER_P (arg))
4760 return NULL_TREE;
4761
4762 arg = save_expr (arg);
4763 return fold (build (TRUTH_ANDIF_EXPR, type,
4764 fold (build (GE_EXPR, type, arg,
4765 build_real (TREE_TYPE (arg),
4766 dconst0))),
4767 fold (build (NE_EXPR, type, arg,
4768 build_real (TREE_TYPE (arg),
4769 c2)))));
4770 }
4771
4772 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
4773 if (! HONOR_NANS (mode))
4774 return fold (build (code, type, arg,
4775 build_real (TREE_TYPE (arg), c2)));
4776
4777 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
4778 if ((*lang_hooks.decls.global_bindings_p) () == 0
4779 && ! CONTAINS_PLACEHOLDER_P (arg))
4780 {
4781 arg = save_expr (arg);
4782 return fold (build (TRUTH_ANDIF_EXPR, type,
4783 fold (build (GE_EXPR, type, arg,
4784 build_real (TREE_TYPE (arg),
4785 dconst0))),
4786 fold (build (code, type, arg,
4787 build_real (TREE_TYPE (arg),
4788 c2)))));
4789 }
4790 }
4791 }
4792
4793 return NULL_TREE;
4794 }
4795
4796 /* Subroutine of fold() that optimizes comparisons against Infinities,
4797 either +Inf or -Inf.
4798
4799 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
4800 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
4801 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
4802
4803 The function returns the constant folded tree if a simplification
4804 can be made, and NULL_TREE otherwise. */
4805
4806 static tree
4807 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
4808 {
4809 enum machine_mode mode;
4810 REAL_VALUE_TYPE max;
4811 tree temp;
4812 bool neg;
4813
4814 mode = TYPE_MODE (TREE_TYPE (arg0));
4815
4816 /* For negative infinity swap the sense of the comparison. */
4817 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
4818 if (neg)
4819 code = swap_tree_comparison (code);
4820
4821 switch (code)
4822 {
4823 case GT_EXPR:
4824 /* x > +Inf is always false, if with ignore sNANs. */
4825 if (HONOR_SNANS (mode))
4826 return NULL_TREE;
4827 return omit_one_operand (type,
4828 convert (type, integer_zero_node),
4829 arg0);
4830
4831 case LE_EXPR:
4832 /* x <= +Inf is always true, if we don't case about NaNs. */
4833 if (! HONOR_NANS (mode))
4834 return omit_one_operand (type,
4835 convert (type, integer_one_node),
4836 arg0);
4837
4838 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
4839 if ((*lang_hooks.decls.global_bindings_p) () == 0
4840 && ! CONTAINS_PLACEHOLDER_P (arg0))
4841 {
4842 arg0 = save_expr (arg0);
4843 return fold (build (EQ_EXPR, type, arg0, arg0));
4844 }
4845 break;
4846
4847 case EQ_EXPR:
4848 case GE_EXPR:
4849 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
4850 real_maxval (&max, neg, mode);
4851 return fold (build (neg ? LT_EXPR : GT_EXPR, type,
4852 arg0, build_real (TREE_TYPE (arg0), max)));
4853
4854 case LT_EXPR:
4855 /* x < +Inf is always equal to x <= DBL_MAX. */
4856 real_maxval (&max, neg, mode);
4857 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
4858 arg0, build_real (TREE_TYPE (arg0), max)));
4859
4860 case NE_EXPR:
4861 /* x != +Inf is always equal to !(x > DBL_MAX). */
4862 real_maxval (&max, neg, mode);
4863 if (! HONOR_NANS (mode))
4864 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
4865 arg0, build_real (TREE_TYPE (arg0), max)));
4866 temp = fold (build (neg ? LT_EXPR : GT_EXPR, type,
4867 arg0, build_real (TREE_TYPE (arg0), max)));
4868 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
4869
4870 default:
4871 break;
4872 }
4873
4874 return NULL_TREE;
4875 }
4876
4877 /* If CODE with arguments ARG0 and ARG1 represents a single bit
4878 equality/inequality test, then return a simplified form of
4879 the test using shifts and logical operations. Otherwise return
4880 NULL. TYPE is the desired result type. */
4881
4882 tree
4883 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
4884 tree result_type)
4885 {
4886 /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
4887 operand 0. */
4888 if (code == TRUTH_NOT_EXPR)
4889 {
4890 code = TREE_CODE (arg0);
4891 if (code != NE_EXPR && code != EQ_EXPR)
4892 return NULL_TREE;
4893
4894 /* Extract the arguments of the EQ/NE. */
4895 arg1 = TREE_OPERAND (arg0, 1);
4896 arg0 = TREE_OPERAND (arg0, 0);
4897
4898 /* This requires us to invert the code. */
4899 code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
4900 }
4901
4902 /* If this is testing a single bit, we can optimize the test. */
4903 if ((code == NE_EXPR || code == EQ_EXPR)
4904 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
4905 && integer_pow2p (TREE_OPERAND (arg0, 1)))
4906 {
4907 tree inner = TREE_OPERAND (arg0, 0);
4908 tree type = TREE_TYPE (arg0);
4909 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
4910 enum machine_mode operand_mode = TYPE_MODE (type);
4911 int ops_unsigned;
4912 tree signed_type, unsigned_type;
4913 tree arg00;
4914
4915 /* If we have (A & C) != 0 where C is the sign bit of A, convert
4916 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
4917 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
4918 if (arg00 != NULL_TREE)
4919 {
4920 tree stype = (*lang_hooks.types.signed_type) (TREE_TYPE (arg00));
4921 return fold (build (code == EQ_EXPR ? GE_EXPR : LT_EXPR, result_type,
4922 convert (stype, arg00),
4923 convert (stype, integer_zero_node)));
4924 }
4925
4926 /* At this point, we know that arg0 is not testing the sign bit. */
4927 if (TYPE_PRECISION (type) - 1 == bitnum)
4928 abort ();
4929
4930 /* Otherwise we have (A & C) != 0 where C is a single bit,
4931 convert that into ((A >> C2) & 1). Where C2 = log2(C).
4932 Similarly for (A & C) == 0. */
4933
4934 /* If INNER is a right shift of a constant and it plus BITNUM does
4935 not overflow, adjust BITNUM and INNER. */
4936 if (TREE_CODE (inner) == RSHIFT_EXPR
4937 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
4938 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
4939 && bitnum < TYPE_PRECISION (type)
4940 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
4941 bitnum - TYPE_PRECISION (type)))
4942 {
4943 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
4944 inner = TREE_OPERAND (inner, 0);
4945 }
4946
4947 /* If we are going to be able to omit the AND below, we must do our
4948 operations as unsigned. If we must use the AND, we have a choice.
4949 Normally unsigned is faster, but for some machines signed is. */
4950 #ifdef LOAD_EXTEND_OP
4951 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
4952 #else
4953 ops_unsigned = 1;
4954 #endif
4955
4956 signed_type = (*lang_hooks.types.type_for_mode) (operand_mode, 0);
4957 unsigned_type = (*lang_hooks.types.type_for_mode) (operand_mode, 1);
4958
4959 if (bitnum != 0)
4960 inner = build (RSHIFT_EXPR, ops_unsigned ? unsigned_type : signed_type,
4961 inner, size_int (bitnum));
4962
4963 if (code == EQ_EXPR)
4964 inner = build (BIT_XOR_EXPR, ops_unsigned ? unsigned_type : signed_type,
4965 inner, integer_one_node);
4966
4967 /* Put the AND last so it can combine with more things. */
4968 inner = build (BIT_AND_EXPR, ops_unsigned ? unsigned_type : signed_type,
4969 inner, integer_one_node);
4970
4971 /* Make sure to return the proper type. */
4972 if (TREE_TYPE (inner) != result_type)
4973 inner = convert (result_type, inner);
4974
4975 return inner;
4976 }
4977 return NULL_TREE;
4978 }
4979
4980 /* Test whether it is preferable two swap two operands, ARG0 and
4981 ARG1, for example because ARG0 is an integer constant and ARG1
4982 isn't. */
4983
4984 static bool
4985 tree_swap_operands_p (tree arg0, tree arg1)
4986 {
4987 STRIP_SIGN_NOPS (arg0);
4988 STRIP_SIGN_NOPS (arg1);
4989
4990 if (TREE_CODE (arg1) == INTEGER_CST)
4991 return 0;
4992 if (TREE_CODE (arg0) == INTEGER_CST)
4993 return 1;
4994
4995 if (TREE_CODE (arg1) == REAL_CST)
4996 return 0;
4997 if (TREE_CODE (arg0) == REAL_CST)
4998 return 1;
4999
5000 if (TREE_CODE (arg1) == COMPLEX_CST)
5001 return 0;
5002 if (TREE_CODE (arg0) == COMPLEX_CST)
5003 return 1;
5004
5005 if (TREE_CONSTANT (arg1))
5006 return 0;
5007 if (TREE_CONSTANT (arg0))
5008 return 1;
5009
5010 if (DECL_P (arg1))
5011 return 0;
5012 if (DECL_P (arg0))
5013 return 1;
5014
5015 if (TREE_CODE (arg1) == SAVE_EXPR)
5016 return 0;
5017 if (TREE_CODE (arg0) == SAVE_EXPR)
5018 return 1;
5019
5020 return 0;
5021 }
5022
5023 /* Perform constant folding and related simplification of EXPR.
5024 The related simplifications include x*1 => x, x*0 => 0, etc.,
5025 and application of the associative law.
5026 NOP_EXPR conversions may be removed freely (as long as we
5027 are careful not to change the C type of the overall expression)
5028 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
5029 but we can constant-fold them if they have constant operands. */
5030
5031 #ifdef ENABLE_FOLD_CHECKING
5032 # define fold(x) fold_1 (x)
5033 static tree fold_1 (tree);
5034 static
5035 #endif
5036 tree
5037 fold (tree expr)
5038 {
5039 tree t = expr, orig_t;
5040 tree t1 = NULL_TREE;
5041 tree tem;
5042 tree type = TREE_TYPE (expr);
5043 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
5044 enum tree_code code = TREE_CODE (t);
5045 int kind = TREE_CODE_CLASS (code);
5046 int invert;
5047 /* WINS will be nonzero when the switch is done
5048 if all operands are constant. */
5049 int wins = 1;
5050
5051 /* Don't try to process an RTL_EXPR since its operands aren't trees.
5052 Likewise for a SAVE_EXPR that's already been evaluated. */
5053 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
5054 return t;
5055
5056 /* Return right away if a constant. */
5057 if (kind == 'c')
5058 return t;
5059
5060 #ifdef MAX_INTEGER_COMPUTATION_MODE
5061 check_max_integer_computation_mode (expr);
5062 #endif
5063 orig_t = t;
5064
5065 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
5066 {
5067 tree subop;
5068
5069 /* Special case for conversion ops that can have fixed point args. */
5070 arg0 = TREE_OPERAND (t, 0);
5071
5072 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
5073 if (arg0 != 0)
5074 STRIP_SIGN_NOPS (arg0);
5075
5076 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
5077 subop = TREE_REALPART (arg0);
5078 else
5079 subop = arg0;
5080
5081 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
5082 && TREE_CODE (subop) != REAL_CST)
5083 /* Note that TREE_CONSTANT isn't enough:
5084 static var addresses are constant but we can't
5085 do arithmetic on them. */
5086 wins = 0;
5087 }
5088 else if (IS_EXPR_CODE_CLASS (kind))
5089 {
5090 int len = first_rtl_op (code);
5091 int i;
5092 for (i = 0; i < len; i++)
5093 {
5094 tree op = TREE_OPERAND (t, i);
5095 tree subop;
5096
5097 if (op == 0)
5098 continue; /* Valid for CALL_EXPR, at least. */
5099
5100 if (kind == '<' || code == RSHIFT_EXPR)
5101 {
5102 /* Signedness matters here. Perhaps we can refine this
5103 later. */
5104 STRIP_SIGN_NOPS (op);
5105 }
5106 else
5107 /* Strip any conversions that don't change the mode. */
5108 STRIP_NOPS (op);
5109
5110 if (TREE_CODE (op) == COMPLEX_CST)
5111 subop = TREE_REALPART (op);
5112 else
5113 subop = op;
5114
5115 if (TREE_CODE (subop) != INTEGER_CST
5116 && TREE_CODE (subop) != REAL_CST)
5117 /* Note that TREE_CONSTANT isn't enough:
5118 static var addresses are constant but we can't
5119 do arithmetic on them. */
5120 wins = 0;
5121
5122 if (i == 0)
5123 arg0 = op;
5124 else if (i == 1)
5125 arg1 = op;
5126 }
5127 }
5128
5129 /* If this is a commutative operation, and ARG0 is a constant, move it
5130 to ARG1 to reduce the number of tests below. */
5131 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
5132 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
5133 || code == BIT_AND_EXPR)
5134 && tree_swap_operands_p (arg0, arg1))
5135 return fold (build (code, type, arg1, arg0));
5136
5137 /* Now WINS is set as described above,
5138 ARG0 is the first operand of EXPR,
5139 and ARG1 is the second operand (if it has more than one operand).
5140
5141 First check for cases where an arithmetic operation is applied to a
5142 compound, conditional, or comparison operation. Push the arithmetic
5143 operation inside the compound or conditional to see if any folding
5144 can then be done. Convert comparison to conditional for this purpose.
5145 The also optimizes non-constant cases that used to be done in
5146 expand_expr.
5147
5148 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
5149 one of the operands is a comparison and the other is a comparison, a
5150 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
5151 code below would make the expression more complex. Change it to a
5152 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
5153 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
5154
5155 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
5156 || code == EQ_EXPR || code == NE_EXPR)
5157 && ((truth_value_p (TREE_CODE (arg0))
5158 && (truth_value_p (TREE_CODE (arg1))
5159 || (TREE_CODE (arg1) == BIT_AND_EXPR
5160 && integer_onep (TREE_OPERAND (arg1, 1)))))
5161 || (truth_value_p (TREE_CODE (arg1))
5162 && (truth_value_p (TREE_CODE (arg0))
5163 || (TREE_CODE (arg0) == BIT_AND_EXPR
5164 && integer_onep (TREE_OPERAND (arg0, 1)))))))
5165 {
5166 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
5167 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
5168 : TRUTH_XOR_EXPR,
5169 type, arg0, arg1));
5170
5171 if (code == EQ_EXPR)
5172 t = invert_truthvalue (t);
5173
5174 return t;
5175 }
5176
5177 if (TREE_CODE_CLASS (code) == '1')
5178 {
5179 if (TREE_CODE (arg0) == COMPOUND_EXPR)
5180 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5181 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
5182 else if (TREE_CODE (arg0) == COND_EXPR)
5183 {
5184 tree arg01 = TREE_OPERAND (arg0, 1);
5185 tree arg02 = TREE_OPERAND (arg0, 2);
5186 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
5187 arg01 = fold (build1 (code, type, arg01));
5188 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
5189 arg02 = fold (build1 (code, type, arg02));
5190 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
5191 arg01, arg02));
5192
5193 /* If this was a conversion, and all we did was to move into
5194 inside the COND_EXPR, bring it back out. But leave it if
5195 it is a conversion from integer to integer and the
5196 result precision is no wider than a word since such a
5197 conversion is cheap and may be optimized away by combine,
5198 while it couldn't if it were outside the COND_EXPR. Then return
5199 so we don't get into an infinite recursion loop taking the
5200 conversion out and then back in. */
5201
5202 if ((code == NOP_EXPR || code == CONVERT_EXPR
5203 || code == NON_LVALUE_EXPR)
5204 && TREE_CODE (t) == COND_EXPR
5205 && TREE_CODE (TREE_OPERAND (t, 1)) == code
5206 && TREE_CODE (TREE_OPERAND (t, 2)) == code
5207 && ! VOID_TYPE_P (TREE_OPERAND (t, 1))
5208 && ! VOID_TYPE_P (TREE_OPERAND (t, 2))
5209 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
5210 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
5211 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
5212 && (INTEGRAL_TYPE_P
5213 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))))
5214 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
5215 t = build1 (code, type,
5216 build (COND_EXPR,
5217 TREE_TYPE (TREE_OPERAND
5218 (TREE_OPERAND (t, 1), 0)),
5219 TREE_OPERAND (t, 0),
5220 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
5221 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
5222 return t;
5223 }
5224 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
5225 return fold (build (COND_EXPR, type, arg0,
5226 fold (build1 (code, type, integer_one_node)),
5227 fold (build1 (code, type, integer_zero_node))));
5228 }
5229 else if (TREE_CODE_CLASS (code) == '<'
5230 && TREE_CODE (arg0) == COMPOUND_EXPR)
5231 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5232 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5233 else if (TREE_CODE_CLASS (code) == '<'
5234 && TREE_CODE (arg1) == COMPOUND_EXPR)
5235 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5236 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
5237 else if (TREE_CODE_CLASS (code) == '2'
5238 || TREE_CODE_CLASS (code) == '<')
5239 {
5240 if (TREE_CODE (arg1) == COMPOUND_EXPR
5241 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg1, 0))
5242 && ! TREE_SIDE_EFFECTS (arg0))
5243 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5244 fold (build (code, type,
5245 arg0, TREE_OPERAND (arg1, 1))));
5246 else if ((TREE_CODE (arg1) == COND_EXPR
5247 || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
5248 && TREE_CODE_CLASS (code) != '<'))
5249 && (TREE_CODE (arg0) != COND_EXPR
5250 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5251 && (! TREE_SIDE_EFFECTS (arg0)
5252 || ((*lang_hooks.decls.global_bindings_p) () == 0
5253 && ! CONTAINS_PLACEHOLDER_P (arg0))))
5254 return
5255 fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
5256 /*cond_first_p=*/0);
5257 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
5258 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5259 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5260 else if ((TREE_CODE (arg0) == COND_EXPR
5261 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
5262 && TREE_CODE_CLASS (code) != '<'))
5263 && (TREE_CODE (arg1) != COND_EXPR
5264 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5265 && (! TREE_SIDE_EFFECTS (arg1)
5266 || ((*lang_hooks.decls.global_bindings_p) () == 0
5267 && ! CONTAINS_PLACEHOLDER_P (arg1))))
5268 return
5269 fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
5270 /*cond_first_p=*/1);
5271 }
5272
5273 switch (code)
5274 {
5275 case INTEGER_CST:
5276 case REAL_CST:
5277 case VECTOR_CST:
5278 case STRING_CST:
5279 case COMPLEX_CST:
5280 case CONSTRUCTOR:
5281 return t;
5282
5283 case CONST_DECL:
5284 return fold (DECL_INITIAL (t));
5285
5286 case NOP_EXPR:
5287 case FLOAT_EXPR:
5288 case CONVERT_EXPR:
5289 case FIX_TRUNC_EXPR:
5290 /* Other kinds of FIX are not handled properly by fold_convert. */
5291
5292 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
5293 return TREE_OPERAND (t, 0);
5294
5295 /* Handle cases of two conversions in a row. */
5296 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
5297 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
5298 {
5299 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5300 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
5301 tree final_type = TREE_TYPE (t);
5302 int inside_int = INTEGRAL_TYPE_P (inside_type);
5303 int inside_ptr = POINTER_TYPE_P (inside_type);
5304 int inside_float = FLOAT_TYPE_P (inside_type);
5305 unsigned int inside_prec = TYPE_PRECISION (inside_type);
5306 int inside_unsignedp = TREE_UNSIGNED (inside_type);
5307 int inter_int = INTEGRAL_TYPE_P (inter_type);
5308 int inter_ptr = POINTER_TYPE_P (inter_type);
5309 int inter_float = FLOAT_TYPE_P (inter_type);
5310 unsigned int inter_prec = TYPE_PRECISION (inter_type);
5311 int inter_unsignedp = TREE_UNSIGNED (inter_type);
5312 int final_int = INTEGRAL_TYPE_P (final_type);
5313 int final_ptr = POINTER_TYPE_P (final_type);
5314 int final_float = FLOAT_TYPE_P (final_type);
5315 unsigned int final_prec = TYPE_PRECISION (final_type);
5316 int final_unsignedp = TREE_UNSIGNED (final_type);
5317
5318 /* In addition to the cases of two conversions in a row
5319 handled below, if we are converting something to its own
5320 type via an object of identical or wider precision, neither
5321 conversion is needed. */
5322 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (final_type)
5323 && ((inter_int && final_int) || (inter_float && final_float))
5324 && inter_prec >= final_prec)
5325 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5326
5327 /* Likewise, if the intermediate and final types are either both
5328 float or both integer, we don't need the middle conversion if
5329 it is wider than the final type and doesn't change the signedness
5330 (for integers). Avoid this if the final type is a pointer
5331 since then we sometimes need the inner conversion. Likewise if
5332 the outer has a precision not equal to the size of its mode. */
5333 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
5334 || (inter_float && inside_float))
5335 && inter_prec >= inside_prec
5336 && (inter_float || inter_unsignedp == inside_unsignedp)
5337 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5338 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5339 && ! final_ptr)
5340 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5341
5342 /* If we have a sign-extension of a zero-extended value, we can
5343 replace that by a single zero-extension. */
5344 if (inside_int && inter_int && final_int
5345 && inside_prec < inter_prec && inter_prec < final_prec
5346 && inside_unsignedp && !inter_unsignedp)
5347 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5348
5349 /* Two conversions in a row are not needed unless:
5350 - some conversion is floating-point (overstrict for now), or
5351 - the intermediate type is narrower than both initial and
5352 final, or
5353 - the intermediate type and innermost type differ in signedness,
5354 and the outermost type is wider than the intermediate, or
5355 - the initial type is a pointer type and the precisions of the
5356 intermediate and final types differ, or
5357 - the final type is a pointer type and the precisions of the
5358 initial and intermediate types differ. */
5359 if (! inside_float && ! inter_float && ! final_float
5360 && (inter_prec > inside_prec || inter_prec > final_prec)
5361 && ! (inside_int && inter_int
5362 && inter_unsignedp != inside_unsignedp
5363 && inter_prec < final_prec)
5364 && ((inter_unsignedp && inter_prec > inside_prec)
5365 == (final_unsignedp && final_prec > inter_prec))
5366 && ! (inside_ptr && inter_prec != final_prec)
5367 && ! (final_ptr && inside_prec != inter_prec)
5368 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5369 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5370 && ! final_ptr)
5371 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5372 }
5373
5374 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
5375 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
5376 /* Detect assigning a bitfield. */
5377 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
5378 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
5379 {
5380 /* Don't leave an assignment inside a conversion
5381 unless assigning a bitfield. */
5382 tree prev = TREE_OPERAND (t, 0);
5383 if (t == orig_t)
5384 t = copy_node (t);
5385 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
5386 /* First do the assignment, then return converted constant. */
5387 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
5388 TREE_USED (t) = 1;
5389 return t;
5390 }
5391
5392 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
5393 constants (if x has signed type, the sign bit cannot be set
5394 in c). This folds extension into the BIT_AND_EXPR. */
5395 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
5396 && TREE_CODE (TREE_TYPE (t)) != BOOLEAN_TYPE
5397 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
5398 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
5399 {
5400 tree and = TREE_OPERAND (t, 0);
5401 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
5402 int change = 0;
5403
5404 if (TREE_UNSIGNED (TREE_TYPE (and))
5405 || (TYPE_PRECISION (TREE_TYPE (t))
5406 <= TYPE_PRECISION (TREE_TYPE (and))))
5407 change = 1;
5408 else if (TYPE_PRECISION (TREE_TYPE (and1))
5409 <= HOST_BITS_PER_WIDE_INT
5410 && host_integerp (and1, 1))
5411 {
5412 unsigned HOST_WIDE_INT cst;
5413
5414 cst = tree_low_cst (and1, 1);
5415 cst &= (HOST_WIDE_INT) -1
5416 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
5417 change = (cst == 0);
5418 #ifdef LOAD_EXTEND_OP
5419 if (change
5420 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
5421 == ZERO_EXTEND))
5422 {
5423 tree uns = (*lang_hooks.types.unsigned_type) (TREE_TYPE (and0));
5424 and0 = convert (uns, and0);
5425 and1 = convert (uns, and1);
5426 }
5427 #endif
5428 }
5429 if (change)
5430 return fold (build (BIT_AND_EXPR, TREE_TYPE (t),
5431 convert (TREE_TYPE (t), and0),
5432 convert (TREE_TYPE (t), and1)));
5433 }
5434
5435 if (!wins)
5436 {
5437 if (TREE_CONSTANT (t) != TREE_CONSTANT (arg0))
5438 {
5439 if (t == orig_t)
5440 t = copy_node (t);
5441 TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
5442 }
5443 return t;
5444 }
5445 return fold_convert (t, arg0);
5446
5447 case VIEW_CONVERT_EXPR:
5448 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
5449 return build1 (VIEW_CONVERT_EXPR, type,
5450 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5451 return t;
5452
5453 case COMPONENT_REF:
5454 if (TREE_CODE (arg0) == CONSTRUCTOR
5455 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
5456 {
5457 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
5458 if (m)
5459 t = TREE_VALUE (m);
5460 }
5461 return t;
5462
5463 case RANGE_EXPR:
5464 if (TREE_CONSTANT (t) != wins)
5465 {
5466 if (t == orig_t)
5467 t = copy_node (t);
5468 TREE_CONSTANT (t) = wins;
5469 }
5470 return t;
5471
5472 case NEGATE_EXPR:
5473 if (wins)
5474 {
5475 if (TREE_CODE (arg0) == INTEGER_CST)
5476 {
5477 unsigned HOST_WIDE_INT low;
5478 HOST_WIDE_INT high;
5479 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5480 TREE_INT_CST_HIGH (arg0),
5481 &low, &high);
5482 t = build_int_2 (low, high);
5483 TREE_TYPE (t) = type;
5484 TREE_OVERFLOW (t)
5485 = (TREE_OVERFLOW (arg0)
5486 | force_fit_type (t, overflow && !TREE_UNSIGNED (type)));
5487 TREE_CONSTANT_OVERFLOW (t)
5488 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5489 }
5490 else if (TREE_CODE (arg0) == REAL_CST)
5491 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5492 }
5493 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5494 return TREE_OPERAND (arg0, 0);
5495 /* Convert -((double)float) into (double)(-float). */
5496 else if (TREE_CODE (arg0) == NOP_EXPR
5497 && TREE_CODE (type) == REAL_TYPE)
5498 {
5499 tree targ0 = strip_float_extensions (arg0);
5500 if (targ0 != arg0)
5501 return convert (type, build1 (NEGATE_EXPR, TREE_TYPE (targ0), targ0));
5502
5503 }
5504
5505 /* Convert - (a - b) to (b - a) for non-floating-point. */
5506 else if (TREE_CODE (arg0) == MINUS_EXPR
5507 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
5508 return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
5509 TREE_OPERAND (arg0, 0));
5510
5511 /* Convert -f(x) into f(-x) where f is sin, tan or atan. */
5512 switch (builtin_mathfn_code (arg0))
5513 {
5514 case BUILT_IN_SIN:
5515 case BUILT_IN_SINF:
5516 case BUILT_IN_SINL:
5517 case BUILT_IN_TAN:
5518 case BUILT_IN_TANF:
5519 case BUILT_IN_TANL:
5520 case BUILT_IN_ATAN:
5521 case BUILT_IN_ATANF:
5522 case BUILT_IN_ATANL:
5523 if (negate_expr_p (TREE_VALUE (TREE_OPERAND (arg0, 1))))
5524 {
5525 tree fndecl, arg, arglist;
5526
5527 fndecl = get_callee_fndecl (arg0);
5528 arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5529 arg = fold (build1 (NEGATE_EXPR, type, arg));
5530 arglist = build_tree_list (NULL_TREE, arg);
5531 return build_function_call_expr (fndecl, arglist);
5532 }
5533 break;
5534
5535 default:
5536 break;
5537 }
5538 return t;
5539
5540 case ABS_EXPR:
5541 if (wins)
5542 {
5543 if (TREE_CODE (arg0) == INTEGER_CST)
5544 {
5545 /* If the value is unsigned, then the absolute value is
5546 the same as the ordinary value. */
5547 if (TREE_UNSIGNED (type))
5548 return arg0;
5549 /* Similarly, if the value is non-negative. */
5550 else if (INT_CST_LT (integer_minus_one_node, arg0))
5551 return arg0;
5552 /* If the value is negative, then the absolute value is
5553 its negation. */
5554 else
5555 {
5556 unsigned HOST_WIDE_INT low;
5557 HOST_WIDE_INT high;
5558 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5559 TREE_INT_CST_HIGH (arg0),
5560 &low, &high);
5561 t = build_int_2 (low, high);
5562 TREE_TYPE (t) = type;
5563 TREE_OVERFLOW (t)
5564 = (TREE_OVERFLOW (arg0)
5565 | force_fit_type (t, overflow));
5566 TREE_CONSTANT_OVERFLOW (t)
5567 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5568 }
5569 }
5570 else if (TREE_CODE (arg0) == REAL_CST)
5571 {
5572 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
5573 t = build_real (type,
5574 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5575 }
5576 }
5577 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5578 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
5579 /* Convert fabs((double)float) into (double)fabsf(float). */
5580 else if (TREE_CODE (arg0) == NOP_EXPR
5581 && TREE_CODE (type) == REAL_TYPE)
5582 {
5583 tree targ0 = strip_float_extensions (arg0);
5584 if (targ0 != arg0)
5585 return convert (type, fold (build1 (ABS_EXPR, TREE_TYPE (targ0),
5586 targ0)));
5587 }
5588 else if (tree_expr_nonnegative_p (arg0))
5589 return arg0;
5590 return t;
5591
5592 case CONJ_EXPR:
5593 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5594 return convert (type, arg0);
5595 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5596 return build (COMPLEX_EXPR, type,
5597 TREE_OPERAND (arg0, 0),
5598 negate_expr (TREE_OPERAND (arg0, 1)));
5599 else if (TREE_CODE (arg0) == COMPLEX_CST)
5600 return build_complex (type, TREE_REALPART (arg0),
5601 negate_expr (TREE_IMAGPART (arg0)));
5602 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5603 return fold (build (TREE_CODE (arg0), type,
5604 fold (build1 (CONJ_EXPR, type,
5605 TREE_OPERAND (arg0, 0))),
5606 fold (build1 (CONJ_EXPR,
5607 type, TREE_OPERAND (arg0, 1)))));
5608 else if (TREE_CODE (arg0) == CONJ_EXPR)
5609 return TREE_OPERAND (arg0, 0);
5610 return t;
5611
5612 case BIT_NOT_EXPR:
5613 if (wins)
5614 {
5615 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
5616 ~ TREE_INT_CST_HIGH (arg0));
5617 TREE_TYPE (t) = type;
5618 force_fit_type (t, 0);
5619 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
5620 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
5621 }
5622 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
5623 return TREE_OPERAND (arg0, 0);
5624 return t;
5625
5626 case PLUS_EXPR:
5627 /* A + (-B) -> A - B */
5628 if (TREE_CODE (arg1) == NEGATE_EXPR)
5629 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5630 /* (-A) + B -> B - A */
5631 if (TREE_CODE (arg0) == NEGATE_EXPR)
5632 return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
5633 else if (! FLOAT_TYPE_P (type))
5634 {
5635 if (integer_zerop (arg1))
5636 return non_lvalue (convert (type, arg0));
5637
5638 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
5639 with a constant, and the two constants have no bits in common,
5640 we should treat this as a BIT_IOR_EXPR since this may produce more
5641 simplifications. */
5642 if (TREE_CODE (arg0) == BIT_AND_EXPR
5643 && TREE_CODE (arg1) == BIT_AND_EXPR
5644 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5645 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5646 && integer_zerop (const_binop (BIT_AND_EXPR,
5647 TREE_OPERAND (arg0, 1),
5648 TREE_OPERAND (arg1, 1), 0)))
5649 {
5650 code = BIT_IOR_EXPR;
5651 goto bit_ior;
5652 }
5653
5654 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
5655 (plus (plus (mult) (mult)) (foo)) so that we can
5656 take advantage of the factoring cases below. */
5657 if ((TREE_CODE (arg0) == PLUS_EXPR
5658 && TREE_CODE (arg1) == MULT_EXPR)
5659 || (TREE_CODE (arg1) == PLUS_EXPR
5660 && TREE_CODE (arg0) == MULT_EXPR))
5661 {
5662 tree parg0, parg1, parg, marg;
5663
5664 if (TREE_CODE (arg0) == PLUS_EXPR)
5665 parg = arg0, marg = arg1;
5666 else
5667 parg = arg1, marg = arg0;
5668 parg0 = TREE_OPERAND (parg, 0);
5669 parg1 = TREE_OPERAND (parg, 1);
5670 STRIP_NOPS (parg0);
5671 STRIP_NOPS (parg1);
5672
5673 if (TREE_CODE (parg0) == MULT_EXPR
5674 && TREE_CODE (parg1) != MULT_EXPR)
5675 return fold (build (PLUS_EXPR, type,
5676 fold (build (PLUS_EXPR, type,
5677 convert (type, parg0),
5678 convert (type, marg))),
5679 convert (type, parg1)));
5680 if (TREE_CODE (parg0) != MULT_EXPR
5681 && TREE_CODE (parg1) == MULT_EXPR)
5682 return fold (build (PLUS_EXPR, type,
5683 fold (build (PLUS_EXPR, type,
5684 convert (type, parg1),
5685 convert (type, marg))),
5686 convert (type, parg0)));
5687 }
5688
5689 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
5690 {
5691 tree arg00, arg01, arg10, arg11;
5692 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
5693
5694 /* (A * C) + (B * C) -> (A+B) * C.
5695 We are most concerned about the case where C is a constant,
5696 but other combinations show up during loop reduction. Since
5697 it is not difficult, try all four possibilities. */
5698
5699 arg00 = TREE_OPERAND (arg0, 0);
5700 arg01 = TREE_OPERAND (arg0, 1);
5701 arg10 = TREE_OPERAND (arg1, 0);
5702 arg11 = TREE_OPERAND (arg1, 1);
5703 same = NULL_TREE;
5704
5705 if (operand_equal_p (arg01, arg11, 0))
5706 same = arg01, alt0 = arg00, alt1 = arg10;
5707 else if (operand_equal_p (arg00, arg10, 0))
5708 same = arg00, alt0 = arg01, alt1 = arg11;
5709 else if (operand_equal_p (arg00, arg11, 0))
5710 same = arg00, alt0 = arg01, alt1 = arg10;
5711 else if (operand_equal_p (arg01, arg10, 0))
5712 same = arg01, alt0 = arg00, alt1 = arg11;
5713
5714 /* No identical multiplicands; see if we can find a common
5715 power-of-two factor in non-power-of-two multiplies. This
5716 can help in multi-dimensional array access. */
5717 else if (TREE_CODE (arg01) == INTEGER_CST
5718 && TREE_CODE (arg11) == INTEGER_CST
5719 && TREE_INT_CST_HIGH (arg01) == 0
5720 && TREE_INT_CST_HIGH (arg11) == 0)
5721 {
5722 HOST_WIDE_INT int01, int11, tmp;
5723 int01 = TREE_INT_CST_LOW (arg01);
5724 int11 = TREE_INT_CST_LOW (arg11);
5725
5726 /* Move min of absolute values to int11. */
5727 if ((int01 >= 0 ? int01 : -int01)
5728 < (int11 >= 0 ? int11 : -int11))
5729 {
5730 tmp = int01, int01 = int11, int11 = tmp;
5731 alt0 = arg00, arg00 = arg10, arg10 = alt0;
5732 alt0 = arg01, arg01 = arg11, arg11 = alt0;
5733 }
5734
5735 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
5736 {
5737 alt0 = fold (build (MULT_EXPR, type, arg00,
5738 build_int_2 (int01 / int11, 0)));
5739 alt1 = arg10;
5740 same = arg11;
5741 }
5742 }
5743
5744 if (same)
5745 return fold (build (MULT_EXPR, type,
5746 fold (build (PLUS_EXPR, type, alt0, alt1)),
5747 same));
5748 }
5749 }
5750 else
5751 {
5752 /* See if ARG1 is zero and X + ARG1 reduces to X. */
5753 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
5754 return non_lvalue (convert (type, arg0));
5755
5756 /* Likewise if the operands are reversed. */
5757 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
5758 return non_lvalue (convert (type, arg1));
5759
5760 /* Convert x+x into x*2.0. */
5761 if (operand_equal_p (arg0, arg1, 0)
5762 && SCALAR_FLOAT_TYPE_P (type))
5763 return fold (build (MULT_EXPR, type, arg0,
5764 build_real (type, dconst2)));
5765
5766 /* Convert x*c+x into x*(c+1). */
5767 if (flag_unsafe_math_optimizations
5768 && TREE_CODE (arg0) == MULT_EXPR
5769 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
5770 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
5771 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
5772 {
5773 REAL_VALUE_TYPE c;
5774
5775 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
5776 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
5777 return fold (build (MULT_EXPR, type, arg1,
5778 build_real (type, c)));
5779 }
5780
5781 /* Convert x+x*c into x*(c+1). */
5782 if (flag_unsafe_math_optimizations
5783 && TREE_CODE (arg1) == MULT_EXPR
5784 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
5785 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
5786 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
5787 {
5788 REAL_VALUE_TYPE c;
5789
5790 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
5791 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
5792 return fold (build (MULT_EXPR, type, arg0,
5793 build_real (type, c)));
5794 }
5795
5796 /* Convert x*c1+x*c2 into x*(c1+c2). */
5797 if (flag_unsafe_math_optimizations
5798 && TREE_CODE (arg0) == MULT_EXPR
5799 && TREE_CODE (arg1) == MULT_EXPR
5800 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
5801 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
5802 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
5803 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
5804 && operand_equal_p (TREE_OPERAND (arg0, 0),
5805 TREE_OPERAND (arg1, 0), 0))
5806 {
5807 REAL_VALUE_TYPE c1, c2;
5808
5809 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
5810 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
5811 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
5812 return fold (build (MULT_EXPR, type,
5813 TREE_OPERAND (arg0, 0),
5814 build_real (type, c1)));
5815 }
5816 }
5817
5818 bit_rotate:
5819 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
5820 is a rotate of A by C1 bits. */
5821 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
5822 is a rotate of A by B bits. */
5823 {
5824 enum tree_code code0, code1;
5825 code0 = TREE_CODE (arg0);
5826 code1 = TREE_CODE (arg1);
5827 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
5828 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
5829 && operand_equal_p (TREE_OPERAND (arg0, 0),
5830 TREE_OPERAND (arg1, 0), 0)
5831 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5832 {
5833 tree tree01, tree11;
5834 enum tree_code code01, code11;
5835
5836 tree01 = TREE_OPERAND (arg0, 1);
5837 tree11 = TREE_OPERAND (arg1, 1);
5838 STRIP_NOPS (tree01);
5839 STRIP_NOPS (tree11);
5840 code01 = TREE_CODE (tree01);
5841 code11 = TREE_CODE (tree11);
5842 if (code01 == INTEGER_CST
5843 && code11 == INTEGER_CST
5844 && TREE_INT_CST_HIGH (tree01) == 0
5845 && TREE_INT_CST_HIGH (tree11) == 0
5846 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
5847 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
5848 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
5849 code0 == LSHIFT_EXPR ? tree01 : tree11);
5850 else if (code11 == MINUS_EXPR)
5851 {
5852 tree tree110, tree111;
5853 tree110 = TREE_OPERAND (tree11, 0);
5854 tree111 = TREE_OPERAND (tree11, 1);
5855 STRIP_NOPS (tree110);
5856 STRIP_NOPS (tree111);
5857 if (TREE_CODE (tree110) == INTEGER_CST
5858 && 0 == compare_tree_int (tree110,
5859 TYPE_PRECISION
5860 (TREE_TYPE (TREE_OPERAND
5861 (arg0, 0))))
5862 && operand_equal_p (tree01, tree111, 0))
5863 return build ((code0 == LSHIFT_EXPR
5864 ? LROTATE_EXPR
5865 : RROTATE_EXPR),
5866 type, TREE_OPERAND (arg0, 0), tree01);
5867 }
5868 else if (code01 == MINUS_EXPR)
5869 {
5870 tree tree010, tree011;
5871 tree010 = TREE_OPERAND (tree01, 0);
5872 tree011 = TREE_OPERAND (tree01, 1);
5873 STRIP_NOPS (tree010);
5874 STRIP_NOPS (tree011);
5875 if (TREE_CODE (tree010) == INTEGER_CST
5876 && 0 == compare_tree_int (tree010,
5877 TYPE_PRECISION
5878 (TREE_TYPE (TREE_OPERAND
5879 (arg0, 0))))
5880 && operand_equal_p (tree11, tree011, 0))
5881 return build ((code0 != LSHIFT_EXPR
5882 ? LROTATE_EXPR
5883 : RROTATE_EXPR),
5884 type, TREE_OPERAND (arg0, 0), tree11);
5885 }
5886 }
5887 }
5888
5889 associate:
5890 /* In most languages, can't associate operations on floats through
5891 parentheses. Rather than remember where the parentheses were, we
5892 don't associate floats at all, unless the user has specified
5893 -funsafe-math-optimizations. */
5894
5895 if (! wins
5896 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
5897 {
5898 tree var0, con0, lit0, minus_lit0;
5899 tree var1, con1, lit1, minus_lit1;
5900
5901 /* Split both trees into variables, constants, and literals. Then
5902 associate each group together, the constants with literals,
5903 then the result with variables. This increases the chances of
5904 literals being recombined later and of generating relocatable
5905 expressions for the sum of a constant and literal. */
5906 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
5907 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
5908 code == MINUS_EXPR);
5909
5910 /* Only do something if we found more than two objects. Otherwise,
5911 nothing has changed and we risk infinite recursion. */
5912 if (2 < ((var0 != 0) + (var1 != 0)
5913 + (con0 != 0) + (con1 != 0)
5914 + (lit0 != 0) + (lit1 != 0)
5915 + (minus_lit0 != 0) + (minus_lit1 != 0)))
5916 {
5917 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
5918 if (code == MINUS_EXPR)
5919 code = PLUS_EXPR;
5920
5921 var0 = associate_trees (var0, var1, code, type);
5922 con0 = associate_trees (con0, con1, code, type);
5923 lit0 = associate_trees (lit0, lit1, code, type);
5924 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
5925
5926 /* Preserve the MINUS_EXPR if the negative part of the literal is
5927 greater than the positive part. Otherwise, the multiplicative
5928 folding code (i.e extract_muldiv) may be fooled in case
5929 unsigned constants are subtracted, like in the following
5930 example: ((X*2 + 4) - 8U)/2. */
5931 if (minus_lit0 && lit0)
5932 {
5933 if (TREE_CODE (lit0) == INTEGER_CST
5934 && TREE_CODE (minus_lit0) == INTEGER_CST
5935 && tree_int_cst_lt (lit0, minus_lit0))
5936 {
5937 minus_lit0 = associate_trees (minus_lit0, lit0,
5938 MINUS_EXPR, type);
5939 lit0 = 0;
5940 }
5941 else
5942 {
5943 lit0 = associate_trees (lit0, minus_lit0,
5944 MINUS_EXPR, type);
5945 minus_lit0 = 0;
5946 }
5947 }
5948 if (minus_lit0)
5949 {
5950 if (con0 == 0)
5951 return convert (type, associate_trees (var0, minus_lit0,
5952 MINUS_EXPR, type));
5953 else
5954 {
5955 con0 = associate_trees (con0, minus_lit0,
5956 MINUS_EXPR, type);
5957 return convert (type, associate_trees (var0, con0,
5958 PLUS_EXPR, type));
5959 }
5960 }
5961
5962 con0 = associate_trees (con0, lit0, code, type);
5963 return convert (type, associate_trees (var0, con0, code, type));
5964 }
5965 }
5966
5967 binary:
5968 if (wins)
5969 t1 = const_binop (code, arg0, arg1, 0);
5970 if (t1 != NULL_TREE)
5971 {
5972 /* The return value should always have
5973 the same type as the original expression. */
5974 if (TREE_TYPE (t1) != TREE_TYPE (t))
5975 t1 = convert (TREE_TYPE (t), t1);
5976
5977 return t1;
5978 }
5979 return t;
5980
5981 case MINUS_EXPR:
5982 /* A - (-B) -> A + B */
5983 if (TREE_CODE (arg1) == NEGATE_EXPR)
5984 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5985 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
5986 if (TREE_CODE (arg0) == NEGATE_EXPR
5987 && (FLOAT_TYPE_P (type)
5988 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
5989 && negate_expr_p (arg1)
5990 && (! TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
5991 && (! TREE_SIDE_EFFECTS (arg1) || TREE_CONSTANT (arg0)))
5992 return fold (build (MINUS_EXPR, type, negate_expr (arg1),
5993 TREE_OPERAND (arg0, 0)));
5994
5995 if (! FLOAT_TYPE_P (type))
5996 {
5997 if (! wins && integer_zerop (arg0))
5998 return negate_expr (convert (type, arg1));
5999 if (integer_zerop (arg1))
6000 return non_lvalue (convert (type, arg0));
6001
6002 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
6003 about the case where C is a constant, just try one of the
6004 four possibilities. */
6005
6006 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
6007 && operand_equal_p (TREE_OPERAND (arg0, 1),
6008 TREE_OPERAND (arg1, 1), 0))
6009 return fold (build (MULT_EXPR, type,
6010 fold (build (MINUS_EXPR, type,
6011 TREE_OPERAND (arg0, 0),
6012 TREE_OPERAND (arg1, 0))),
6013 TREE_OPERAND (arg0, 1)));
6014
6015 /* Fold A - (A & B) into ~B & A. */
6016 if (!TREE_SIDE_EFFECTS (arg0)
6017 && TREE_CODE (arg1) == BIT_AND_EXPR)
6018 {
6019 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
6020 return fold (build (BIT_AND_EXPR, type,
6021 fold (build1 (BIT_NOT_EXPR, type,
6022 TREE_OPERAND (arg1, 0))),
6023 arg0));
6024 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
6025 return fold (build (BIT_AND_EXPR, type,
6026 fold (build1 (BIT_NOT_EXPR, type,
6027 TREE_OPERAND (arg1, 1))),
6028 arg0));
6029 }
6030 }
6031
6032 /* See if ARG1 is zero and X - ARG1 reduces to X. */
6033 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
6034 return non_lvalue (convert (type, arg0));
6035
6036 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
6037 ARG0 is zero and X + ARG0 reduces to X, since that would mean
6038 (-ARG1 + ARG0) reduces to -ARG1. */
6039 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6040 return negate_expr (convert (type, arg1));
6041
6042 /* Fold &x - &x. This can happen from &x.foo - &x.
6043 This is unsafe for certain floats even in non-IEEE formats.
6044 In IEEE, it is unsafe because it does wrong for NaNs.
6045 Also note that operand_equal_p is always false if an operand
6046 is volatile. */
6047
6048 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
6049 && operand_equal_p (arg0, arg1, 0))
6050 return convert (type, integer_zero_node);
6051
6052 goto associate;
6053
6054 case MULT_EXPR:
6055 /* (-A) * (-B) -> A * B */
6056 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6057 return fold (build (MULT_EXPR, type,
6058 TREE_OPERAND (arg0, 0),
6059 negate_expr (arg1)));
6060 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6061 return fold (build (MULT_EXPR, type,
6062 negate_expr (arg0),
6063 TREE_OPERAND (arg1, 0)));
6064
6065 if (! FLOAT_TYPE_P (type))
6066 {
6067 if (integer_zerop (arg1))
6068 return omit_one_operand (type, arg1, arg0);
6069 if (integer_onep (arg1))
6070 return non_lvalue (convert (type, arg0));
6071
6072 /* (a * (1 << b)) is (a << b) */
6073 if (TREE_CODE (arg1) == LSHIFT_EXPR
6074 && integer_onep (TREE_OPERAND (arg1, 0)))
6075 return fold (build (LSHIFT_EXPR, type, arg0,
6076 TREE_OPERAND (arg1, 1)));
6077 if (TREE_CODE (arg0) == LSHIFT_EXPR
6078 && integer_onep (TREE_OPERAND (arg0, 0)))
6079 return fold (build (LSHIFT_EXPR, type, arg1,
6080 TREE_OPERAND (arg0, 1)));
6081
6082 if (TREE_CODE (arg1) == INTEGER_CST
6083 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
6084 convert (type, arg1),
6085 code, NULL_TREE)))
6086 return convert (type, tem);
6087
6088 }
6089 else
6090 {
6091 /* Maybe fold x * 0 to 0. The expressions aren't the same
6092 when x is NaN, since x * 0 is also NaN. Nor are they the
6093 same in modes with signed zeros, since multiplying a
6094 negative value by 0 gives -0, not +0. */
6095 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
6096 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
6097 && real_zerop (arg1))
6098 return omit_one_operand (type, arg1, arg0);
6099 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
6100 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6101 && real_onep (arg1))
6102 return non_lvalue (convert (type, arg0));
6103
6104 /* Transform x * -1.0 into -x. */
6105 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6106 && real_minus_onep (arg1))
6107 return fold (build1 (NEGATE_EXPR, type, arg0));
6108
6109 /* Convert (C1/X)*C2 into (C1*C2)/X. */
6110 if (flag_unsafe_math_optimizations
6111 && TREE_CODE (arg0) == RDIV_EXPR
6112 && TREE_CODE (arg1) == REAL_CST
6113 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
6114 {
6115 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
6116 arg1, 0);
6117 if (tem)
6118 return fold (build (RDIV_EXPR, type, tem,
6119 TREE_OPERAND (arg0, 1)));
6120 }
6121
6122 if (flag_unsafe_math_optimizations)
6123 {
6124 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6125 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6126
6127 /* Optimizations of sqrt(...)*sqrt(...). */
6128 if ((fcode0 == BUILT_IN_SQRT && fcode1 == BUILT_IN_SQRT)
6129 || (fcode0 == BUILT_IN_SQRTF && fcode1 == BUILT_IN_SQRTF)
6130 || (fcode0 == BUILT_IN_SQRTL && fcode1 == BUILT_IN_SQRTL))
6131 {
6132 tree sqrtfn, arg, arglist;
6133 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6134 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6135
6136 /* Optimize sqrt(x)*sqrt(x) as x. */
6137 if (operand_equal_p (arg00, arg10, 0)
6138 && ! HONOR_SNANS (TYPE_MODE (type)))
6139 return arg00;
6140
6141 /* Optimize sqrt(x)*sqrt(y) as sqrt(x*y). */
6142 sqrtfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6143 arg = fold (build (MULT_EXPR, type, arg00, arg10));
6144 arglist = build_tree_list (NULL_TREE, arg);
6145 return build_function_call_expr (sqrtfn, arglist);
6146 }
6147
6148 /* Optimize expN(x)*expN(y) as expN(x+y). */
6149 if (fcode0 == fcode1
6150 && (fcode0 == BUILT_IN_EXP
6151 || fcode0 == BUILT_IN_EXPF
6152 || fcode0 == BUILT_IN_EXPL
6153 || fcode0 == BUILT_IN_EXP2
6154 || fcode0 == BUILT_IN_EXP2F
6155 || fcode0 == BUILT_IN_EXP2L
6156 || fcode0 == BUILT_IN_EXP10
6157 || fcode0 == BUILT_IN_EXP10F
6158 || fcode0 == BUILT_IN_EXP10L
6159 || fcode0 == BUILT_IN_POW10
6160 || fcode0 == BUILT_IN_POW10F
6161 || fcode0 == BUILT_IN_POW10L))
6162 {
6163 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6164 tree arg = build (PLUS_EXPR, type,
6165 TREE_VALUE (TREE_OPERAND (arg0, 1)),
6166 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6167 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6168 return build_function_call_expr (expfn, arglist);
6169 }
6170
6171 /* Optimizations of pow(...)*pow(...). */
6172 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
6173 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
6174 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
6175 {
6176 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6177 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6178 1)));
6179 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6180 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6181 1)));
6182
6183 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
6184 if (operand_equal_p (arg01, arg11, 0))
6185 {
6186 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6187 tree arg = build (MULT_EXPR, type, arg00, arg10);
6188 tree arglist = tree_cons (NULL_TREE, fold (arg),
6189 build_tree_list (NULL_TREE,
6190 arg01));
6191 return build_function_call_expr (powfn, arglist);
6192 }
6193
6194 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
6195 if (operand_equal_p (arg00, arg10, 0))
6196 {
6197 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6198 tree arg = fold (build (PLUS_EXPR, type, arg01, arg11));
6199 tree arglist = tree_cons (NULL_TREE, arg00,
6200 build_tree_list (NULL_TREE,
6201 arg));
6202 return build_function_call_expr (powfn, arglist);
6203 }
6204 }
6205
6206 /* Optimize tan(x)*cos(x) as sin(x). */
6207 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
6208 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
6209 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
6210 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
6211 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
6212 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
6213 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6214 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6215 {
6216 tree sinfn;
6217
6218 switch (fcode0)
6219 {
6220 case BUILT_IN_TAN:
6221 case BUILT_IN_COS:
6222 sinfn = implicit_built_in_decls[BUILT_IN_SIN];
6223 break;
6224 case BUILT_IN_TANF:
6225 case BUILT_IN_COSF:
6226 sinfn = implicit_built_in_decls[BUILT_IN_SINF];
6227 break;
6228 case BUILT_IN_TANL:
6229 case BUILT_IN_COSL:
6230 sinfn = implicit_built_in_decls[BUILT_IN_SINL];
6231 break;
6232 default:
6233 sinfn = NULL_TREE;
6234 }
6235
6236 if (sinfn != NULL_TREE)
6237 return build_function_call_expr (sinfn,
6238 TREE_OPERAND (arg0, 1));
6239 }
6240
6241 /* Optimize x*pow(x,c) as pow(x,c+1). */
6242 if (fcode1 == BUILT_IN_POW
6243 || fcode1 == BUILT_IN_POWF
6244 || fcode1 == BUILT_IN_POWL)
6245 {
6246 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6247 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6248 1)));
6249 if (TREE_CODE (arg11) == REAL_CST
6250 && ! TREE_CONSTANT_OVERFLOW (arg11)
6251 && operand_equal_p (arg0, arg10, 0))
6252 {
6253 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6254 REAL_VALUE_TYPE c;
6255 tree arg, arglist;
6256
6257 c = TREE_REAL_CST (arg11);
6258 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6259 arg = build_real (type, c);
6260 arglist = build_tree_list (NULL_TREE, arg);
6261 arglist = tree_cons (NULL_TREE, arg0, arglist);
6262 return build_function_call_expr (powfn, arglist);
6263 }
6264 }
6265
6266 /* Optimize pow(x,c)*x as pow(x,c+1). */
6267 if (fcode0 == BUILT_IN_POW
6268 || fcode0 == BUILT_IN_POWF
6269 || fcode0 == BUILT_IN_POWL)
6270 {
6271 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6272 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6273 1)));
6274 if (TREE_CODE (arg01) == REAL_CST
6275 && ! TREE_CONSTANT_OVERFLOW (arg01)
6276 && operand_equal_p (arg1, arg00, 0))
6277 {
6278 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6279 REAL_VALUE_TYPE c;
6280 tree arg, arglist;
6281
6282 c = TREE_REAL_CST (arg01);
6283 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6284 arg = build_real (type, c);
6285 arglist = build_tree_list (NULL_TREE, arg);
6286 arglist = tree_cons (NULL_TREE, arg1, arglist);
6287 return build_function_call_expr (powfn, arglist);
6288 }
6289 }
6290
6291 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
6292 if (! optimize_size
6293 && operand_equal_p (arg0, arg1, 0))
6294 {
6295 tree powfn;
6296
6297 if (type == double_type_node)
6298 powfn = implicit_built_in_decls[BUILT_IN_POW];
6299 else if (type == float_type_node)
6300 powfn = implicit_built_in_decls[BUILT_IN_POWF];
6301 else if (type == long_double_type_node)
6302 powfn = implicit_built_in_decls[BUILT_IN_POWL];
6303 else
6304 powfn = NULL_TREE;
6305
6306 if (powfn)
6307 {
6308 tree arg = build_real (type, dconst2);
6309 tree arglist = build_tree_list (NULL_TREE, arg);
6310 arglist = tree_cons (NULL_TREE, arg0, arglist);
6311 return build_function_call_expr (powfn, arglist);
6312 }
6313 }
6314 }
6315 }
6316 goto associate;
6317
6318 case BIT_IOR_EXPR:
6319 bit_ior:
6320 if (integer_all_onesp (arg1))
6321 return omit_one_operand (type, arg1, arg0);
6322 if (integer_zerop (arg1))
6323 return non_lvalue (convert (type, arg0));
6324 t1 = distribute_bit_expr (code, type, arg0, arg1);
6325 if (t1 != NULL_TREE)
6326 return t1;
6327
6328 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
6329
6330 This results in more efficient code for machines without a NAND
6331 instruction. Combine will canonicalize to the first form
6332 which will allow use of NAND instructions provided by the
6333 backend if they exist. */
6334 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6335 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6336 {
6337 return fold (build1 (BIT_NOT_EXPR, type,
6338 build (BIT_AND_EXPR, type,
6339 TREE_OPERAND (arg0, 0),
6340 TREE_OPERAND (arg1, 0))));
6341 }
6342
6343 /* See if this can be simplified into a rotate first. If that
6344 is unsuccessful continue in the association code. */
6345 goto bit_rotate;
6346
6347 case BIT_XOR_EXPR:
6348 if (integer_zerop (arg1))
6349 return non_lvalue (convert (type, arg0));
6350 if (integer_all_onesp (arg1))
6351 return fold (build1 (BIT_NOT_EXPR, type, arg0));
6352
6353 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
6354 with a constant, and the two constants have no bits in common,
6355 we should treat this as a BIT_IOR_EXPR since this may produce more
6356 simplifications. */
6357 if (TREE_CODE (arg0) == BIT_AND_EXPR
6358 && TREE_CODE (arg1) == BIT_AND_EXPR
6359 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6360 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6361 && integer_zerop (const_binop (BIT_AND_EXPR,
6362 TREE_OPERAND (arg0, 1),
6363 TREE_OPERAND (arg1, 1), 0)))
6364 {
6365 code = BIT_IOR_EXPR;
6366 goto bit_ior;
6367 }
6368
6369 /* See if this can be simplified into a rotate first. If that
6370 is unsuccessful continue in the association code. */
6371 goto bit_rotate;
6372
6373 case BIT_AND_EXPR:
6374 if (integer_all_onesp (arg1))
6375 return non_lvalue (convert (type, arg0));
6376 if (integer_zerop (arg1))
6377 return omit_one_operand (type, arg1, arg0);
6378 t1 = distribute_bit_expr (code, type, arg0, arg1);
6379 if (t1 != NULL_TREE)
6380 return t1;
6381 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
6382 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
6383 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6384 {
6385 unsigned int prec
6386 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
6387
6388 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
6389 && (~TREE_INT_CST_LOW (arg1)
6390 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
6391 return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
6392 }
6393
6394 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
6395
6396 This results in more efficient code for machines without a NOR
6397 instruction. Combine will canonicalize to the first form
6398 which will allow use of NOR instructions provided by the
6399 backend if they exist. */
6400 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6401 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6402 {
6403 return fold (build1 (BIT_NOT_EXPR, type,
6404 build (BIT_IOR_EXPR, type,
6405 TREE_OPERAND (arg0, 0),
6406 TREE_OPERAND (arg1, 0))));
6407 }
6408
6409 goto associate;
6410
6411 case RDIV_EXPR:
6412 /* Don't touch a floating-point divide by zero unless the mode
6413 of the constant can represent infinity. */
6414 if (TREE_CODE (arg1) == REAL_CST
6415 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
6416 && real_zerop (arg1))
6417 return t;
6418
6419 /* (-A) / (-B) -> A / B */
6420 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6421 return fold (build (RDIV_EXPR, type,
6422 TREE_OPERAND (arg0, 0),
6423 negate_expr (arg1)));
6424 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6425 return fold (build (RDIV_EXPR, type,
6426 negate_expr (arg0),
6427 TREE_OPERAND (arg1, 0)));
6428
6429 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
6430 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6431 && real_onep (arg1))
6432 return non_lvalue (convert (type, arg0));
6433
6434 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
6435 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6436 && real_minus_onep (arg1))
6437 return non_lvalue (convert (type, negate_expr (arg0)));
6438
6439 /* If ARG1 is a constant, we can convert this to a multiply by the
6440 reciprocal. This does not have the same rounding properties,
6441 so only do this if -funsafe-math-optimizations. We can actually
6442 always safely do it if ARG1 is a power of two, but it's hard to
6443 tell if it is or not in a portable manner. */
6444 if (TREE_CODE (arg1) == REAL_CST)
6445 {
6446 if (flag_unsafe_math_optimizations
6447 && 0 != (tem = const_binop (code, build_real (type, dconst1),
6448 arg1, 0)))
6449 return fold (build (MULT_EXPR, type, arg0, tem));
6450 /* Find the reciprocal if optimizing and the result is exact. */
6451 if (optimize)
6452 {
6453 REAL_VALUE_TYPE r;
6454 r = TREE_REAL_CST (arg1);
6455 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
6456 {
6457 tem = build_real (type, r);
6458 return fold (build (MULT_EXPR, type, arg0, tem));
6459 }
6460 }
6461 }
6462 /* Convert A/B/C to A/(B*C). */
6463 if (flag_unsafe_math_optimizations
6464 && TREE_CODE (arg0) == RDIV_EXPR)
6465 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
6466 fold (build (MULT_EXPR, type,
6467 TREE_OPERAND (arg0, 1), arg1))));
6468
6469 /* Convert A/(B/C) to (A/B)*C. */
6470 if (flag_unsafe_math_optimizations
6471 && TREE_CODE (arg1) == RDIV_EXPR)
6472 return fold (build (MULT_EXPR, type,
6473 fold (build (RDIV_EXPR, type, arg0,
6474 TREE_OPERAND (arg1, 0))),
6475 TREE_OPERAND (arg1, 1)));
6476
6477 /* Convert C1/(X*C2) into (C1/C2)/X. */
6478 if (flag_unsafe_math_optimizations
6479 && TREE_CODE (arg1) == MULT_EXPR
6480 && TREE_CODE (arg0) == REAL_CST
6481 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
6482 {
6483 tree tem = const_binop (RDIV_EXPR, arg0,
6484 TREE_OPERAND (arg1, 1), 0);
6485 if (tem)
6486 return fold (build (RDIV_EXPR, type, tem,
6487 TREE_OPERAND (arg1, 0)));
6488 }
6489
6490 if (flag_unsafe_math_optimizations)
6491 {
6492 enum built_in_function fcode = builtin_mathfn_code (arg1);
6493 /* Optimize x/expN(y) into x*expN(-y). */
6494 if (fcode == BUILT_IN_EXP
6495 || fcode == BUILT_IN_EXPF
6496 || fcode == BUILT_IN_EXPL
6497 || fcode == BUILT_IN_EXP2
6498 || fcode == BUILT_IN_EXP2F
6499 || fcode == BUILT_IN_EXP2L
6500 || fcode == BUILT_IN_EXP10
6501 || fcode == BUILT_IN_EXP10F
6502 || fcode == BUILT_IN_EXP10L
6503 || fcode == BUILT_IN_POW10
6504 || fcode == BUILT_IN_POW10F
6505 || fcode == BUILT_IN_POW10L)
6506 {
6507 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6508 tree arg = build1 (NEGATE_EXPR, type,
6509 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6510 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6511 arg1 = build_function_call_expr (expfn, arglist);
6512 return fold (build (MULT_EXPR, type, arg0, arg1));
6513 }
6514
6515 /* Optimize x/pow(y,z) into x*pow(y,-z). */
6516 if (fcode == BUILT_IN_POW
6517 || fcode == BUILT_IN_POWF
6518 || fcode == BUILT_IN_POWL)
6519 {
6520 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6521 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6522 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
6523 tree neg11 = fold (build1 (NEGATE_EXPR, type, arg11));
6524 tree arglist = tree_cons(NULL_TREE, arg10,
6525 build_tree_list (NULL_TREE, neg11));
6526 arg1 = build_function_call_expr (powfn, arglist);
6527 return fold (build (MULT_EXPR, type, arg0, arg1));
6528 }
6529 }
6530
6531 if (flag_unsafe_math_optimizations)
6532 {
6533 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6534 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6535
6536 /* Optimize sin(x)/cos(x) as tan(x). */
6537 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
6538 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
6539 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
6540 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6541 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6542 {
6543 tree tanfn;
6544
6545 if (fcode0 == BUILT_IN_SIN)
6546 tanfn = implicit_built_in_decls[BUILT_IN_TAN];
6547 else if (fcode0 == BUILT_IN_SINF)
6548 tanfn = implicit_built_in_decls[BUILT_IN_TANF];
6549 else if (fcode0 == BUILT_IN_SINL)
6550 tanfn = implicit_built_in_decls[BUILT_IN_TANL];
6551 else
6552 tanfn = NULL_TREE;
6553
6554 if (tanfn != NULL_TREE)
6555 return build_function_call_expr (tanfn,
6556 TREE_OPERAND (arg0, 1));
6557 }
6558
6559 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
6560 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
6561 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
6562 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
6563 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6564 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6565 {
6566 tree tanfn;
6567
6568 if (fcode0 == BUILT_IN_COS)
6569 tanfn = implicit_built_in_decls[BUILT_IN_TAN];
6570 else if (fcode0 == BUILT_IN_COSF)
6571 tanfn = implicit_built_in_decls[BUILT_IN_TANF];
6572 else if (fcode0 == BUILT_IN_COSL)
6573 tanfn = implicit_built_in_decls[BUILT_IN_TANL];
6574 else
6575 tanfn = NULL_TREE;
6576
6577 if (tanfn != NULL_TREE)
6578 {
6579 tree tmp = TREE_OPERAND (arg0, 1);
6580 tmp = build_function_call_expr (tanfn, tmp);
6581 return fold (build (RDIV_EXPR, type,
6582 build_real (type, dconst1),
6583 tmp));
6584 }
6585 }
6586
6587 /* Optimize pow(x,c)/x as pow(x,c-1). */
6588 if (fcode0 == BUILT_IN_POW
6589 || fcode0 == BUILT_IN_POWF
6590 || fcode0 == BUILT_IN_POWL)
6591 {
6592 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6593 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
6594 if (TREE_CODE (arg01) == REAL_CST
6595 && ! TREE_CONSTANT_OVERFLOW (arg01)
6596 && operand_equal_p (arg1, arg00, 0))
6597 {
6598 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6599 REAL_VALUE_TYPE c;
6600 tree arg, arglist;
6601
6602 c = TREE_REAL_CST (arg01);
6603 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
6604 arg = build_real (type, c);
6605 arglist = build_tree_list (NULL_TREE, arg);
6606 arglist = tree_cons (NULL_TREE, arg1, arglist);
6607 return build_function_call_expr (powfn, arglist);
6608 }
6609 }
6610 }
6611 goto binary;
6612
6613 case TRUNC_DIV_EXPR:
6614 case ROUND_DIV_EXPR:
6615 case FLOOR_DIV_EXPR:
6616 case CEIL_DIV_EXPR:
6617 case EXACT_DIV_EXPR:
6618 if (integer_onep (arg1))
6619 return non_lvalue (convert (type, arg0));
6620 if (integer_zerop (arg1))
6621 return t;
6622
6623 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
6624 operation, EXACT_DIV_EXPR.
6625
6626 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
6627 At one time others generated faster code, it's not clear if they do
6628 after the last round to changes to the DIV code in expmed.c. */
6629 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
6630 && multiple_of_p (type, arg0, arg1))
6631 return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));
6632
6633 if (TREE_CODE (arg1) == INTEGER_CST
6634 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6635 code, NULL_TREE)))
6636 return convert (type, tem);
6637
6638 goto binary;
6639
6640 case CEIL_MOD_EXPR:
6641 case FLOOR_MOD_EXPR:
6642 case ROUND_MOD_EXPR:
6643 case TRUNC_MOD_EXPR:
6644 if (integer_onep (arg1))
6645 return omit_one_operand (type, integer_zero_node, arg0);
6646 if (integer_zerop (arg1))
6647 return t;
6648
6649 if (TREE_CODE (arg1) == INTEGER_CST
6650 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6651 code, NULL_TREE)))
6652 return convert (type, tem);
6653
6654 goto binary;
6655
6656 case LROTATE_EXPR:
6657 case RROTATE_EXPR:
6658 if (integer_all_onesp (arg0))
6659 return omit_one_operand (type, arg0, arg1);
6660 goto shift;
6661
6662 case RSHIFT_EXPR:
6663 /* Optimize -1 >> x for arithmetic right shifts. */
6664 if (integer_all_onesp (arg0) && ! TREE_UNSIGNED (type))
6665 return omit_one_operand (type, arg0, arg1);
6666 /* ... fall through ... */
6667
6668 case LSHIFT_EXPR:
6669 shift:
6670 if (integer_zerop (arg1))
6671 return non_lvalue (convert (type, arg0));
6672 if (integer_zerop (arg0))
6673 return omit_one_operand (type, arg0, arg1);
6674
6675 /* Since negative shift count is not well-defined,
6676 don't try to compute it in the compiler. */
6677 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
6678 return t;
6679 /* Rewrite an LROTATE_EXPR by a constant into an
6680 RROTATE_EXPR by a new constant. */
6681 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
6682 {
6683 tree tem = build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0);
6684 tem = convert (TREE_TYPE (arg1), tem);
6685 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
6686 return fold (build (RROTATE_EXPR, type, arg0, tem));
6687 }
6688
6689 /* If we have a rotate of a bit operation with the rotate count and
6690 the second operand of the bit operation both constant,
6691 permute the two operations. */
6692 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6693 && (TREE_CODE (arg0) == BIT_AND_EXPR
6694 || TREE_CODE (arg0) == BIT_IOR_EXPR
6695 || TREE_CODE (arg0) == BIT_XOR_EXPR)
6696 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
6697 return fold (build (TREE_CODE (arg0), type,
6698 fold (build (code, type,
6699 TREE_OPERAND (arg0, 0), arg1)),
6700 fold (build (code, type,
6701 TREE_OPERAND (arg0, 1), arg1))));
6702
6703 /* Two consecutive rotates adding up to the width of the mode can
6704 be ignored. */
6705 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6706 && TREE_CODE (arg0) == RROTATE_EXPR
6707 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6708 && TREE_INT_CST_HIGH (arg1) == 0
6709 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
6710 && ((TREE_INT_CST_LOW (arg1)
6711 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
6712 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
6713 return TREE_OPERAND (arg0, 0);
6714
6715 goto binary;
6716
6717 case MIN_EXPR:
6718 if (operand_equal_p (arg0, arg1, 0))
6719 return omit_one_operand (type, arg0, arg1);
6720 if (INTEGRAL_TYPE_P (type)
6721 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
6722 return omit_one_operand (type, arg1, arg0);
6723 goto associate;
6724
6725 case MAX_EXPR:
6726 if (operand_equal_p (arg0, arg1, 0))
6727 return omit_one_operand (type, arg0, arg1);
6728 if (INTEGRAL_TYPE_P (type)
6729 && TYPE_MAX_VALUE (type)
6730 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
6731 return omit_one_operand (type, arg1, arg0);
6732 goto associate;
6733
6734 case TRUTH_NOT_EXPR:
6735 /* Note that the operand of this must be an int
6736 and its values must be 0 or 1.
6737 ("true" is a fixed value perhaps depending on the language,
6738 but we don't handle values other than 1 correctly yet.) */
6739 tem = invert_truthvalue (arg0);
6740 /* Avoid infinite recursion. */
6741 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
6742 {
6743 tem = fold_single_bit_test (code, arg0, arg1, type);
6744 if (tem)
6745 return tem;
6746 return t;
6747 }
6748 return convert (type, tem);
6749
6750 case TRUTH_ANDIF_EXPR:
6751 /* Note that the operands of this must be ints
6752 and their values must be 0 or 1.
6753 ("true" is a fixed value perhaps depending on the language.) */
6754 /* If first arg is constant zero, return it. */
6755 if (integer_zerop (arg0))
6756 return convert (type, arg0);
6757 case TRUTH_AND_EXPR:
6758 /* If either arg is constant true, drop it. */
6759 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6760 return non_lvalue (convert (type, arg1));
6761 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
6762 /* Preserve sequence points. */
6763 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
6764 return non_lvalue (convert (type, arg0));
6765 /* If second arg is constant zero, result is zero, but first arg
6766 must be evaluated. */
6767 if (integer_zerop (arg1))
6768 return omit_one_operand (type, arg1, arg0);
6769 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
6770 case will be handled here. */
6771 if (integer_zerop (arg0))
6772 return omit_one_operand (type, arg0, arg1);
6773
6774 truth_andor:
6775 /* We only do these simplifications if we are optimizing. */
6776 if (!optimize)
6777 return t;
6778
6779 /* Check for things like (A || B) && (A || C). We can convert this
6780 to A || (B && C). Note that either operator can be any of the four
6781 truth and/or operations and the transformation will still be
6782 valid. Also note that we only care about order for the
6783 ANDIF and ORIF operators. If B contains side effects, this
6784 might change the truth-value of A. */
6785 if (TREE_CODE (arg0) == TREE_CODE (arg1)
6786 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
6787 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
6788 || TREE_CODE (arg0) == TRUTH_AND_EXPR
6789 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
6790 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
6791 {
6792 tree a00 = TREE_OPERAND (arg0, 0);
6793 tree a01 = TREE_OPERAND (arg0, 1);
6794 tree a10 = TREE_OPERAND (arg1, 0);
6795 tree a11 = TREE_OPERAND (arg1, 1);
6796 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
6797 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
6798 && (code == TRUTH_AND_EXPR
6799 || code == TRUTH_OR_EXPR));
6800
6801 if (operand_equal_p (a00, a10, 0))
6802 return fold (build (TREE_CODE (arg0), type, a00,
6803 fold (build (code, type, a01, a11))));
6804 else if (commutative && operand_equal_p (a00, a11, 0))
6805 return fold (build (TREE_CODE (arg0), type, a00,
6806 fold (build (code, type, a01, a10))));
6807 else if (commutative && operand_equal_p (a01, a10, 0))
6808 return fold (build (TREE_CODE (arg0), type, a01,
6809 fold (build (code, type, a00, a11))));
6810
6811 /* This case if tricky because we must either have commutative
6812 operators or else A10 must not have side-effects. */
6813
6814 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
6815 && operand_equal_p (a01, a11, 0))
6816 return fold (build (TREE_CODE (arg0), type,
6817 fold (build (code, type, a00, a10)),
6818 a01));
6819 }
6820
6821 /* See if we can build a range comparison. */
6822 if (0 != (tem = fold_range_test (t)))
6823 return tem;
6824
6825 /* Check for the possibility of merging component references. If our
6826 lhs is another similar operation, try to merge its rhs with our
6827 rhs. Then try to merge our lhs and rhs. */
6828 if (TREE_CODE (arg0) == code
6829 && 0 != (tem = fold_truthop (code, type,
6830 TREE_OPERAND (arg0, 1), arg1)))
6831 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6832
6833 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
6834 return tem;
6835
6836 return t;
6837
6838 case TRUTH_ORIF_EXPR:
6839 /* Note that the operands of this must be ints
6840 and their values must be 0 or true.
6841 ("true" is a fixed value perhaps depending on the language.) */
6842 /* If first arg is constant true, return it. */
6843 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6844 return convert (type, arg0);
6845 case TRUTH_OR_EXPR:
6846 /* If either arg is constant zero, drop it. */
6847 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
6848 return non_lvalue (convert (type, arg1));
6849 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
6850 /* Preserve sequence points. */
6851 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
6852 return non_lvalue (convert (type, arg0));
6853 /* If second arg is constant true, result is true, but we must
6854 evaluate first arg. */
6855 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
6856 return omit_one_operand (type, arg1, arg0);
6857 /* Likewise for first arg, but note this only occurs here for
6858 TRUTH_OR_EXPR. */
6859 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6860 return omit_one_operand (type, arg0, arg1);
6861 goto truth_andor;
6862
6863 case TRUTH_XOR_EXPR:
6864 /* If either arg is constant zero, drop it. */
6865 if (integer_zerop (arg0))
6866 return non_lvalue (convert (type, arg1));
6867 if (integer_zerop (arg1))
6868 return non_lvalue (convert (type, arg0));
6869 /* If either arg is constant true, this is a logical inversion. */
6870 if (integer_onep (arg0))
6871 return non_lvalue (convert (type, invert_truthvalue (arg1)));
6872 if (integer_onep (arg1))
6873 return non_lvalue (convert (type, invert_truthvalue (arg0)));
6874 return t;
6875
6876 case EQ_EXPR:
6877 case NE_EXPR:
6878 case LT_EXPR:
6879 case GT_EXPR:
6880 case LE_EXPR:
6881 case GE_EXPR:
6882 /* If one arg is a real or integer constant, put it last. */
6883 if (tree_swap_operands_p (arg0, arg1))
6884 return fold (build (swap_tree_comparison (code), type, arg1, arg0));
6885
6886 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
6887 {
6888 tree targ0 = strip_float_extensions (arg0);
6889 tree targ1 = strip_float_extensions (arg1);
6890 tree newtype = TREE_TYPE (targ0);
6891
6892 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
6893 newtype = TREE_TYPE (targ1);
6894
6895 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
6896 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
6897 return fold (build (code, type, convert (newtype, targ0),
6898 convert (newtype, targ1)));
6899
6900 /* (-a) CMP (-b) -> b CMP a */
6901 if (TREE_CODE (arg0) == NEGATE_EXPR
6902 && TREE_CODE (arg1) == NEGATE_EXPR)
6903 return fold (build (code, type, TREE_OPERAND (arg1, 0),
6904 TREE_OPERAND (arg0, 0)));
6905
6906 if (TREE_CODE (arg1) == REAL_CST)
6907 {
6908 REAL_VALUE_TYPE cst;
6909 cst = TREE_REAL_CST (arg1);
6910
6911 /* (-a) CMP CST -> a swap(CMP) (-CST) */
6912 if (TREE_CODE (arg0) == NEGATE_EXPR)
6913 return
6914 fold (build (swap_tree_comparison (code), type,
6915 TREE_OPERAND (arg0, 0),
6916 build_real (TREE_TYPE (arg1),
6917 REAL_VALUE_NEGATE (cst))));
6918
6919 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
6920 /* a CMP (-0) -> a CMP 0 */
6921 if (REAL_VALUE_MINUS_ZERO (cst))
6922 return fold (build (code, type, arg0,
6923 build_real (TREE_TYPE (arg1), dconst0)));
6924
6925 /* x != NaN is always true, other ops are always false. */
6926 if (REAL_VALUE_ISNAN (cst)
6927 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
6928 {
6929 t = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
6930 return omit_one_operand (type, convert (type, t), arg0);
6931 }
6932
6933 /* Fold comparisons against infinity. */
6934 if (REAL_VALUE_ISINF (cst))
6935 {
6936 tem = fold_inf_compare (code, type, arg0, arg1);
6937 if (tem != NULL_TREE)
6938 return tem;
6939 }
6940 }
6941
6942 /* If this is a comparison of a real constant with a PLUS_EXPR
6943 or a MINUS_EXPR of a real constant, we can convert it into a
6944 comparison with a revised real constant as long as no overflow
6945 occurs when unsafe_math_optimizations are enabled. */
6946 if (flag_unsafe_math_optimizations
6947 && TREE_CODE (arg1) == REAL_CST
6948 && (TREE_CODE (arg0) == PLUS_EXPR
6949 || TREE_CODE (arg0) == MINUS_EXPR)
6950 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6951 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
6952 ? MINUS_EXPR : PLUS_EXPR,
6953 arg1, TREE_OPERAND (arg0, 1), 0))
6954 && ! TREE_CONSTANT_OVERFLOW (tem))
6955 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6956
6957 /* Likewise, we can simplify a comparison of a real constant with
6958 a MINUS_EXPR whose first operand is also a real constant, i.e.
6959 (c1 - x) < c2 becomes x > c1-c2. */
6960 if (flag_unsafe_math_optimizations
6961 && TREE_CODE (arg1) == REAL_CST
6962 && TREE_CODE (arg0) == MINUS_EXPR
6963 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
6964 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
6965 arg1, 0))
6966 && ! TREE_CONSTANT_OVERFLOW (tem))
6967 return fold (build (swap_tree_comparison (code), type,
6968 TREE_OPERAND (arg0, 1), tem));
6969
6970 /* Fold comparisons against built-in math functions. */
6971 if (TREE_CODE (arg1) == REAL_CST
6972 && flag_unsafe_math_optimizations
6973 && ! flag_errno_math)
6974 {
6975 enum built_in_function fcode = builtin_mathfn_code (arg0);
6976
6977 if (fcode != END_BUILTINS)
6978 {
6979 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
6980 if (tem != NULL_TREE)
6981 return tem;
6982 }
6983 }
6984 }
6985
6986 /* Convert foo++ == CONST into ++foo == CONST + INCR.
6987 First, see if one arg is constant; find the constant arg
6988 and the other one. */
6989 {
6990 tree constop = 0, varop = NULL_TREE;
6991 int constopnum = -1;
6992
6993 if (TREE_CONSTANT (arg1))
6994 constopnum = 1, constop = arg1, varop = arg0;
6995 if (TREE_CONSTANT (arg0))
6996 constopnum = 0, constop = arg0, varop = arg1;
6997
6998 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
6999 {
7000 /* This optimization is invalid for ordered comparisons
7001 if CONST+INCR overflows or if foo+incr might overflow.
7002 This optimization is invalid for floating point due to rounding.
7003 For pointer types we assume overflow doesn't happen. */
7004 if (POINTER_TYPE_P (TREE_TYPE (varop))
7005 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
7006 && (code == EQ_EXPR || code == NE_EXPR)))
7007 {
7008 tree newconst
7009 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
7010 constop, TREE_OPERAND (varop, 1)));
7011
7012 /* Do not overwrite the current varop to be a preincrement,
7013 create a new node so that we won't confuse our caller who
7014 might create trees and throw them away, reusing the
7015 arguments that they passed to build. This shows up in
7016 the THEN or ELSE parts of ?: being postincrements. */
7017 varop = build (PREINCREMENT_EXPR, TREE_TYPE (varop),
7018 TREE_OPERAND (varop, 0),
7019 TREE_OPERAND (varop, 1));
7020
7021 /* If VAROP is a reference to a bitfield, we must mask
7022 the constant by the width of the field. */
7023 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7024 && DECL_BIT_FIELD(TREE_OPERAND
7025 (TREE_OPERAND (varop, 0), 1)))
7026 {
7027 int size
7028 = TREE_INT_CST_LOW (DECL_SIZE
7029 (TREE_OPERAND
7030 (TREE_OPERAND (varop, 0), 1)));
7031 tree mask, unsigned_type;
7032 unsigned int precision;
7033 tree folded_compare;
7034
7035 /* First check whether the comparison would come out
7036 always the same. If we don't do that we would
7037 change the meaning with the masking. */
7038 if (constopnum == 0)
7039 folded_compare = fold (build (code, type, constop,
7040 TREE_OPERAND (varop, 0)));
7041 else
7042 folded_compare = fold (build (code, type,
7043 TREE_OPERAND (varop, 0),
7044 constop));
7045 if (integer_zerop (folded_compare)
7046 || integer_onep (folded_compare))
7047 return omit_one_operand (type, folded_compare, varop);
7048
7049 unsigned_type = (*lang_hooks.types.type_for_size)(size, 1);
7050 precision = TYPE_PRECISION (unsigned_type);
7051 mask = build_int_2 (~0, ~0);
7052 TREE_TYPE (mask) = unsigned_type;
7053 force_fit_type (mask, 0);
7054 mask = const_binop (RSHIFT_EXPR, mask,
7055 size_int (precision - size), 0);
7056 newconst = fold (build (BIT_AND_EXPR,
7057 TREE_TYPE (varop), newconst,
7058 convert (TREE_TYPE (varop),
7059 mask)));
7060 }
7061
7062 t = build (code, type,
7063 (constopnum == 0) ? newconst : varop,
7064 (constopnum == 1) ? newconst : varop);
7065 return t;
7066 }
7067 }
7068 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
7069 {
7070 if (POINTER_TYPE_P (TREE_TYPE (varop))
7071 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
7072 && (code == EQ_EXPR || code == NE_EXPR)))
7073 {
7074 tree newconst
7075 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
7076 constop, TREE_OPERAND (varop, 1)));
7077
7078 /* Do not overwrite the current varop to be a predecrement,
7079 create a new node so that we won't confuse our caller who
7080 might create trees and throw them away, reusing the
7081 arguments that they passed to build. This shows up in
7082 the THEN or ELSE parts of ?: being postdecrements. */
7083 varop = build (PREDECREMENT_EXPR, TREE_TYPE (varop),
7084 TREE_OPERAND (varop, 0),
7085 TREE_OPERAND (varop, 1));
7086
7087 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7088 && DECL_BIT_FIELD(TREE_OPERAND
7089 (TREE_OPERAND (varop, 0), 1)))
7090 {
7091 int size
7092 = TREE_INT_CST_LOW (DECL_SIZE
7093 (TREE_OPERAND
7094 (TREE_OPERAND (varop, 0), 1)));
7095 tree mask, unsigned_type;
7096 unsigned int precision;
7097 tree folded_compare;
7098
7099 if (constopnum == 0)
7100 folded_compare = fold (build (code, type, constop,
7101 TREE_OPERAND (varop, 0)));
7102 else
7103 folded_compare = fold (build (code, type,
7104 TREE_OPERAND (varop, 0),
7105 constop));
7106 if (integer_zerop (folded_compare)
7107 || integer_onep (folded_compare))
7108 return omit_one_operand (type, folded_compare, varop);
7109
7110 unsigned_type = (*lang_hooks.types.type_for_size)(size, 1);
7111 precision = TYPE_PRECISION (unsigned_type);
7112 mask = build_int_2 (~0, ~0);
7113 TREE_TYPE (mask) = TREE_TYPE (varop);
7114 force_fit_type (mask, 0);
7115 mask = const_binop (RSHIFT_EXPR, mask,
7116 size_int (precision - size), 0);
7117 newconst = fold (build (BIT_AND_EXPR,
7118 TREE_TYPE (varop), newconst,
7119 convert (TREE_TYPE (varop),
7120 mask)));
7121 }
7122
7123 t = build (code, type,
7124 (constopnum == 0) ? newconst : varop,
7125 (constopnum == 1) ? newconst : varop);
7126 return t;
7127 }
7128 }
7129 }
7130
7131 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
7132 This transformation affects the cases which are handled in later
7133 optimizations involving comparisons with non-negative constants. */
7134 if (TREE_CODE (arg1) == INTEGER_CST
7135 && TREE_CODE (arg0) != INTEGER_CST
7136 && tree_int_cst_sgn (arg1) > 0)
7137 {
7138 switch (code)
7139 {
7140 case GE_EXPR:
7141 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7142 return fold (build (GT_EXPR, type, arg0, arg1));
7143
7144 case LT_EXPR:
7145 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7146 return fold (build (LE_EXPR, type, arg0, arg1));
7147
7148 default:
7149 break;
7150 }
7151 }
7152
7153 /* Comparisons with the highest or lowest possible integer of
7154 the specified size will have known values. */
7155 {
7156 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
7157
7158 if (TREE_CODE (arg1) == INTEGER_CST
7159 && ! TREE_CONSTANT_OVERFLOW (arg1)
7160 && width <= HOST_BITS_PER_WIDE_INT
7161 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
7162 || POINTER_TYPE_P (TREE_TYPE (arg1))))
7163 {
7164 unsigned HOST_WIDE_INT signed_max;
7165 unsigned HOST_WIDE_INT max, min;
7166
7167 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
7168
7169 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7170 {
7171 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
7172 min = 0;
7173 }
7174 else
7175 {
7176 max = signed_max;
7177 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
7178 }
7179
7180 if (TREE_INT_CST_HIGH (arg1) == 0
7181 && TREE_INT_CST_LOW (arg1) == max)
7182 switch (code)
7183 {
7184 case GT_EXPR:
7185 return omit_one_operand (type,
7186 convert (type, integer_zero_node),
7187 arg0);
7188 case GE_EXPR:
7189 return fold (build (EQ_EXPR, type, arg0, arg1));
7190
7191 case LE_EXPR:
7192 return omit_one_operand (type,
7193 convert (type, integer_one_node),
7194 arg0);
7195 case LT_EXPR:
7196 return fold (build (NE_EXPR, type, arg0, arg1));
7197
7198 /* The GE_EXPR and LT_EXPR cases above are not normally
7199 reached because of previous transformations. */
7200
7201 default:
7202 break;
7203 }
7204 else if (TREE_INT_CST_HIGH (arg1) == 0
7205 && TREE_INT_CST_LOW (arg1) == max - 1)
7206 switch (code)
7207 {
7208 case GT_EXPR:
7209 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7210 return fold (build (EQ_EXPR, type, arg0, arg1));
7211 case LE_EXPR:
7212 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7213 return fold (build (NE_EXPR, type, arg0, arg1));
7214 default:
7215 break;
7216 }
7217 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7218 && TREE_INT_CST_LOW (arg1) == min)
7219 switch (code)
7220 {
7221 case LT_EXPR:
7222 return omit_one_operand (type,
7223 convert (type, integer_zero_node),
7224 arg0);
7225 case LE_EXPR:
7226 return fold (build (EQ_EXPR, type, arg0, arg1));
7227
7228 case GE_EXPR:
7229 return omit_one_operand (type,
7230 convert (type, integer_one_node),
7231 arg0);
7232 case GT_EXPR:
7233 return fold (build (NE_EXPR, type, arg0, arg1));
7234
7235 default:
7236 break;
7237 }
7238 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7239 && TREE_INT_CST_LOW (arg1) == min + 1)
7240 switch (code)
7241 {
7242 case GE_EXPR:
7243 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7244 return fold (build (NE_EXPR, type, arg0, arg1));
7245 case LT_EXPR:
7246 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7247 return fold (build (EQ_EXPR, type, arg0, arg1));
7248 default:
7249 break;
7250 }
7251
7252 else if (TREE_INT_CST_HIGH (arg1) == 0
7253 && TREE_INT_CST_LOW (arg1) == signed_max
7254 && TREE_UNSIGNED (TREE_TYPE (arg1))
7255 /* signed_type does not work on pointer types. */
7256 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
7257 {
7258 /* The following case also applies to X < signed_max+1
7259 and X >= signed_max+1 because previous transformations. */
7260 if (code == LE_EXPR || code == GT_EXPR)
7261 {
7262 tree st0, st1;
7263 st0 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg0));
7264 st1 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg1));
7265 return fold
7266 (build (code == LE_EXPR ? GE_EXPR: LT_EXPR,
7267 type, convert (st0, arg0),
7268 convert (st1, integer_zero_node)));
7269 }
7270 }
7271 }
7272 }
7273
7274 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
7275 a MINUS_EXPR of a constant, we can convert it into a comparison with
7276 a revised constant as long as no overflow occurs. */
7277 if ((code == EQ_EXPR || code == NE_EXPR)
7278 && TREE_CODE (arg1) == INTEGER_CST
7279 && (TREE_CODE (arg0) == PLUS_EXPR
7280 || TREE_CODE (arg0) == MINUS_EXPR)
7281 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7282 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7283 ? MINUS_EXPR : PLUS_EXPR,
7284 arg1, TREE_OPERAND (arg0, 1), 0))
7285 && ! TREE_CONSTANT_OVERFLOW (tem))
7286 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7287
7288 /* Similarly for a NEGATE_EXPR. */
7289 else if ((code == EQ_EXPR || code == NE_EXPR)
7290 && TREE_CODE (arg0) == NEGATE_EXPR
7291 && TREE_CODE (arg1) == INTEGER_CST
7292 && 0 != (tem = negate_expr (arg1))
7293 && TREE_CODE (tem) == INTEGER_CST
7294 && ! TREE_CONSTANT_OVERFLOW (tem))
7295 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7296
7297 /* If we have X - Y == 0, we can convert that to X == Y and similarly
7298 for !=. Don't do this for ordered comparisons due to overflow. */
7299 else if ((code == NE_EXPR || code == EQ_EXPR)
7300 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
7301 return fold (build (code, type,
7302 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
7303
7304 /* If we are widening one operand of an integer comparison,
7305 see if the other operand is similarly being widened. Perhaps we
7306 can do the comparison in the narrower type. */
7307 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
7308 && TREE_CODE (arg0) == NOP_EXPR
7309 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
7310 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
7311 && (TREE_TYPE (t1) == TREE_TYPE (tem)
7312 || (TREE_CODE (t1) == INTEGER_CST
7313 && int_fits_type_p (t1, TREE_TYPE (tem)))))
7314 return fold (build (code, type, tem, convert (TREE_TYPE (tem), t1)));
7315
7316 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
7317 constant, we can simplify it. */
7318 else if (TREE_CODE (arg1) == INTEGER_CST
7319 && (TREE_CODE (arg0) == MIN_EXPR
7320 || TREE_CODE (arg0) == MAX_EXPR)
7321 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7322 return optimize_minmax_comparison (t);
7323
7324 /* If we are comparing an ABS_EXPR with a constant, we can
7325 convert all the cases into explicit comparisons, but they may
7326 well not be faster than doing the ABS and one comparison.
7327 But ABS (X) <= C is a range comparison, which becomes a subtraction
7328 and a comparison, and is probably faster. */
7329 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7330 && TREE_CODE (arg0) == ABS_EXPR
7331 && ! TREE_SIDE_EFFECTS (arg0)
7332 && (0 != (tem = negate_expr (arg1)))
7333 && TREE_CODE (tem) == INTEGER_CST
7334 && ! TREE_CONSTANT_OVERFLOW (tem))
7335 return fold (build (TRUTH_ANDIF_EXPR, type,
7336 build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
7337 build (LE_EXPR, type,
7338 TREE_OPERAND (arg0, 0), arg1)));
7339
7340 /* If this is an EQ or NE comparison with zero and ARG0 is
7341 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
7342 two operations, but the latter can be done in one less insn
7343 on machines that have only two-operand insns or on which a
7344 constant cannot be the first operand. */
7345 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
7346 && TREE_CODE (arg0) == BIT_AND_EXPR)
7347 {
7348 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
7349 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
7350 return
7351 fold (build (code, type,
7352 build (BIT_AND_EXPR, TREE_TYPE (arg0),
7353 build (RSHIFT_EXPR,
7354 TREE_TYPE (TREE_OPERAND (arg0, 0)),
7355 TREE_OPERAND (arg0, 1),
7356 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
7357 convert (TREE_TYPE (arg0),
7358 integer_one_node)),
7359 arg1));
7360 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
7361 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
7362 return
7363 fold (build (code, type,
7364 build (BIT_AND_EXPR, TREE_TYPE (arg0),
7365 build (RSHIFT_EXPR,
7366 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7367 TREE_OPERAND (arg0, 0),
7368 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
7369 convert (TREE_TYPE (arg0),
7370 integer_one_node)),
7371 arg1));
7372 }
7373
7374 /* If this is an NE or EQ comparison of zero against the result of a
7375 signed MOD operation whose second operand is a power of 2, make
7376 the MOD operation unsigned since it is simpler and equivalent. */
7377 if ((code == NE_EXPR || code == EQ_EXPR)
7378 && integer_zerop (arg1)
7379 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
7380 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
7381 || TREE_CODE (arg0) == CEIL_MOD_EXPR
7382 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
7383 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
7384 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7385 {
7386 tree newtype = (*lang_hooks.types.unsigned_type) (TREE_TYPE (arg0));
7387 tree newmod = build (TREE_CODE (arg0), newtype,
7388 convert (newtype, TREE_OPERAND (arg0, 0)),
7389 convert (newtype, TREE_OPERAND (arg0, 1)));
7390
7391 return build (code, type, newmod, convert (newtype, arg1));
7392 }
7393
7394 /* If this is an NE comparison of zero with an AND of one, remove the
7395 comparison since the AND will give the correct value. */
7396 if (code == NE_EXPR && integer_zerop (arg1)
7397 && TREE_CODE (arg0) == BIT_AND_EXPR
7398 && integer_onep (TREE_OPERAND (arg0, 1)))
7399 return convert (type, arg0);
7400
7401 /* If we have (A & C) == C where C is a power of 2, convert this into
7402 (A & C) != 0. Similarly for NE_EXPR. */
7403 if ((code == EQ_EXPR || code == NE_EXPR)
7404 && TREE_CODE (arg0) == BIT_AND_EXPR
7405 && integer_pow2p (TREE_OPERAND (arg0, 1))
7406 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
7407 return fold (build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
7408 arg0, integer_zero_node));
7409
7410 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
7411 2, then fold the expression into shifts and logical operations. */
7412 tem = fold_single_bit_test (code, arg0, arg1, type);
7413 if (tem)
7414 return tem;
7415
7416 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
7417 Similarly for NE_EXPR. */
7418 if ((code == EQ_EXPR || code == NE_EXPR)
7419 && TREE_CODE (arg0) == BIT_AND_EXPR
7420 && TREE_CODE (arg1) == INTEGER_CST
7421 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7422 {
7423 tree dandnotc
7424 = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
7425 arg1, build1 (BIT_NOT_EXPR,
7426 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7427 TREE_OPERAND (arg0, 1))));
7428 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7429 if (integer_nonzerop (dandnotc))
7430 return omit_one_operand (type, rslt, arg0);
7431 }
7432
7433 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
7434 Similarly for NE_EXPR. */
7435 if ((code == EQ_EXPR || code == NE_EXPR)
7436 && TREE_CODE (arg0) == BIT_IOR_EXPR
7437 && TREE_CODE (arg1) == INTEGER_CST
7438 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7439 {
7440 tree candnotd
7441 = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
7442 TREE_OPERAND (arg0, 1),
7443 build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1)));
7444 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7445 if (integer_nonzerop (candnotd))
7446 return omit_one_operand (type, rslt, arg0);
7447 }
7448
7449 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
7450 and similarly for >= into !=. */
7451 if ((code == LT_EXPR || code == GE_EXPR)
7452 && TREE_UNSIGNED (TREE_TYPE (arg0))
7453 && TREE_CODE (arg1) == LSHIFT_EXPR
7454 && integer_onep (TREE_OPERAND (arg1, 0)))
7455 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7456 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7457 TREE_OPERAND (arg1, 1)),
7458 convert (TREE_TYPE (arg0), integer_zero_node));
7459
7460 else if ((code == LT_EXPR || code == GE_EXPR)
7461 && TREE_UNSIGNED (TREE_TYPE (arg0))
7462 && (TREE_CODE (arg1) == NOP_EXPR
7463 || TREE_CODE (arg1) == CONVERT_EXPR)
7464 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
7465 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
7466 return
7467 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7468 convert (TREE_TYPE (arg0),
7469 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7470 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
7471 convert (TREE_TYPE (arg0), integer_zero_node));
7472
7473 /* Simplify comparison of something with itself. (For IEEE
7474 floating-point, we can only do some of these simplifications.) */
7475 if (operand_equal_p (arg0, arg1, 0))
7476 {
7477 switch (code)
7478 {
7479 case EQ_EXPR:
7480 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7481 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7482 return constant_boolean_node (1, type);
7483 break;
7484
7485 case GE_EXPR:
7486 case LE_EXPR:
7487 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7488 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7489 return constant_boolean_node (1, type);
7490 return fold (build (EQ_EXPR, type, arg0, arg1));
7491
7492 case NE_EXPR:
7493 /* For NE, we can only do this simplification if integer
7494 or we don't honor IEEE floating point NaNs. */
7495 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
7496 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7497 break;
7498 /* ... fall through ... */
7499 case GT_EXPR:
7500 case LT_EXPR:
7501 return constant_boolean_node (0, type);
7502 default:
7503 abort ();
7504 }
7505 }
7506
7507 /* If we are comparing an expression that just has comparisons
7508 of two integer values, arithmetic expressions of those comparisons,
7509 and constants, we can simplify it. There are only three cases
7510 to check: the two values can either be equal, the first can be
7511 greater, or the second can be greater. Fold the expression for
7512 those three values. Since each value must be 0 or 1, we have
7513 eight possibilities, each of which corresponds to the constant 0
7514 or 1 or one of the six possible comparisons.
7515
7516 This handles common cases like (a > b) == 0 but also handles
7517 expressions like ((x > y) - (y > x)) > 0, which supposedly
7518 occur in macroized code. */
7519
7520 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
7521 {
7522 tree cval1 = 0, cval2 = 0;
7523 int save_p = 0;
7524
7525 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
7526 /* Don't handle degenerate cases here; they should already
7527 have been handled anyway. */
7528 && cval1 != 0 && cval2 != 0
7529 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
7530 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
7531 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
7532 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
7533 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
7534 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
7535 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
7536 {
7537 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
7538 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
7539
7540 /* We can't just pass T to eval_subst in case cval1 or cval2
7541 was the same as ARG1. */
7542
7543 tree high_result
7544 = fold (build (code, type,
7545 eval_subst (arg0, cval1, maxval, cval2, minval),
7546 arg1));
7547 tree equal_result
7548 = fold (build (code, type,
7549 eval_subst (arg0, cval1, maxval, cval2, maxval),
7550 arg1));
7551 tree low_result
7552 = fold (build (code, type,
7553 eval_subst (arg0, cval1, minval, cval2, maxval),
7554 arg1));
7555
7556 /* All three of these results should be 0 or 1. Confirm they
7557 are. Then use those values to select the proper code
7558 to use. */
7559
7560 if ((integer_zerop (high_result)
7561 || integer_onep (high_result))
7562 && (integer_zerop (equal_result)
7563 || integer_onep (equal_result))
7564 && (integer_zerop (low_result)
7565 || integer_onep (low_result)))
7566 {
7567 /* Make a 3-bit mask with the high-order bit being the
7568 value for `>', the next for '=', and the low for '<'. */
7569 switch ((integer_onep (high_result) * 4)
7570 + (integer_onep (equal_result) * 2)
7571 + integer_onep (low_result))
7572 {
7573 case 0:
7574 /* Always false. */
7575 return omit_one_operand (type, integer_zero_node, arg0);
7576 case 1:
7577 code = LT_EXPR;
7578 break;
7579 case 2:
7580 code = EQ_EXPR;
7581 break;
7582 case 3:
7583 code = LE_EXPR;
7584 break;
7585 case 4:
7586 code = GT_EXPR;
7587 break;
7588 case 5:
7589 code = NE_EXPR;
7590 break;
7591 case 6:
7592 code = GE_EXPR;
7593 break;
7594 case 7:
7595 /* Always true. */
7596 return omit_one_operand (type, integer_one_node, arg0);
7597 }
7598
7599 t = build (code, type, cval1, cval2);
7600 if (save_p)
7601 return save_expr (t);
7602 else
7603 return fold (t);
7604 }
7605 }
7606 }
7607
7608 /* If this is a comparison of a field, we may be able to simplify it. */
7609 if (((TREE_CODE (arg0) == COMPONENT_REF
7610 && (*lang_hooks.can_use_bit_fields_p) ())
7611 || TREE_CODE (arg0) == BIT_FIELD_REF)
7612 && (code == EQ_EXPR || code == NE_EXPR)
7613 /* Handle the constant case even without -O
7614 to make sure the warnings are given. */
7615 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
7616 {
7617 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
7618 return t1 ? t1 : t;
7619 }
7620
7621 /* If this is a comparison of complex values and either or both sides
7622 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
7623 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
7624 This may prevent needless evaluations. */
7625 if ((code == EQ_EXPR || code == NE_EXPR)
7626 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
7627 && (TREE_CODE (arg0) == COMPLEX_EXPR
7628 || TREE_CODE (arg1) == COMPLEX_EXPR
7629 || TREE_CODE (arg0) == COMPLEX_CST
7630 || TREE_CODE (arg1) == COMPLEX_CST))
7631 {
7632 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
7633 tree real0, imag0, real1, imag1;
7634
7635 arg0 = save_expr (arg0);
7636 arg1 = save_expr (arg1);
7637 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
7638 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
7639 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
7640 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
7641
7642 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
7643 : TRUTH_ORIF_EXPR),
7644 type,
7645 fold (build (code, type, real0, real1)),
7646 fold (build (code, type, imag0, imag1))));
7647 }
7648
7649 /* Optimize comparisons of strlen vs zero to a compare of the
7650 first character of the string vs zero. To wit,
7651 strlen(ptr) == 0 => *ptr == 0
7652 strlen(ptr) != 0 => *ptr != 0
7653 Other cases should reduce to one of these two (or a constant)
7654 due to the return value of strlen being unsigned. */
7655 if ((code == EQ_EXPR || code == NE_EXPR)
7656 && integer_zerop (arg1)
7657 && TREE_CODE (arg0) == CALL_EXPR)
7658 {
7659 tree fndecl = get_callee_fndecl (arg0);
7660 tree arglist;
7661
7662 if (fndecl
7663 && DECL_BUILT_IN (fndecl)
7664 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
7665 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
7666 && (arglist = TREE_OPERAND (arg0, 1))
7667 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
7668 && ! TREE_CHAIN (arglist))
7669 return fold (build (code, type,
7670 build1 (INDIRECT_REF, char_type_node,
7671 TREE_VALUE(arglist)),
7672 integer_zero_node));
7673 }
7674
7675 /* From here on, the only cases we handle are when the result is
7676 known to be a constant.
7677
7678 To compute GT, swap the arguments and do LT.
7679 To compute GE, do LT and invert the result.
7680 To compute LE, swap the arguments, do LT and invert the result.
7681 To compute NE, do EQ and invert the result.
7682
7683 Therefore, the code below must handle only EQ and LT. */
7684
7685 if (code == LE_EXPR || code == GT_EXPR)
7686 {
7687 tem = arg0, arg0 = arg1, arg1 = tem;
7688 code = swap_tree_comparison (code);
7689 }
7690
7691 /* Note that it is safe to invert for real values here because we
7692 will check below in the one case that it matters. */
7693
7694 t1 = NULL_TREE;
7695 invert = 0;
7696 if (code == NE_EXPR || code == GE_EXPR)
7697 {
7698 invert = 1;
7699 code = invert_tree_comparison (code);
7700 }
7701
7702 /* Compute a result for LT or EQ if args permit;
7703 otherwise return T. */
7704 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
7705 {
7706 if (code == EQ_EXPR)
7707 t1 = build_int_2 (tree_int_cst_equal (arg0, arg1), 0);
7708 else
7709 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
7710 ? INT_CST_LT_UNSIGNED (arg0, arg1)
7711 : INT_CST_LT (arg0, arg1)),
7712 0);
7713 }
7714
7715 #if 0 /* This is no longer useful, but breaks some real code. */
7716 /* Assume a nonexplicit constant cannot equal an explicit one,
7717 since such code would be undefined anyway.
7718 Exception: on sysvr4, using #pragma weak,
7719 a label can come out as 0. */
7720 else if (TREE_CODE (arg1) == INTEGER_CST
7721 && !integer_zerop (arg1)
7722 && TREE_CONSTANT (arg0)
7723 && TREE_CODE (arg0) == ADDR_EXPR
7724 && code == EQ_EXPR)
7725 t1 = build_int_2 (0, 0);
7726 #endif
7727 /* Two real constants can be compared explicitly. */
7728 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
7729 {
7730 /* If either operand is a NaN, the result is false with two
7731 exceptions: First, an NE_EXPR is true on NaNs, but that case
7732 is already handled correctly since we will be inverting the
7733 result for NE_EXPR. Second, if we had inverted a LE_EXPR
7734 or a GE_EXPR into a LT_EXPR, we must return true so that it
7735 will be inverted into false. */
7736
7737 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
7738 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
7739 t1 = build_int_2 (invert && code == LT_EXPR, 0);
7740
7741 else if (code == EQ_EXPR)
7742 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
7743 TREE_REAL_CST (arg1)),
7744 0);
7745 else
7746 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
7747 TREE_REAL_CST (arg1)),
7748 0);
7749 }
7750
7751 if (t1 == NULL_TREE)
7752 return t;
7753
7754 if (invert)
7755 TREE_INT_CST_LOW (t1) ^= 1;
7756
7757 TREE_TYPE (t1) = type;
7758 if (TREE_CODE (type) == BOOLEAN_TYPE)
7759 return (*lang_hooks.truthvalue_conversion) (t1);
7760 return t1;
7761
7762 case COND_EXPR:
7763 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
7764 so all simple results must be passed through pedantic_non_lvalue. */
7765 if (TREE_CODE (arg0) == INTEGER_CST)
7766 return pedantic_non_lvalue
7767 (TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
7768 else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
7769 return pedantic_omit_one_operand (type, arg1, arg0);
7770
7771 /* If we have A op B ? A : C, we may be able to convert this to a
7772 simpler expression, depending on the operation and the values
7773 of B and C. Signed zeros prevent all of these transformations,
7774 for reasons given above each one. */
7775
7776 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
7777 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
7778 arg1, TREE_OPERAND (arg0, 1))
7779 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
7780 {
7781 tree arg2 = TREE_OPERAND (t, 2);
7782 enum tree_code comp_code = TREE_CODE (arg0);
7783
7784 STRIP_NOPS (arg2);
7785
7786 /* If we have A op 0 ? A : -A, consider applying the following
7787 transformations:
7788
7789 A == 0? A : -A same as -A
7790 A != 0? A : -A same as A
7791 A >= 0? A : -A same as abs (A)
7792 A > 0? A : -A same as abs (A)
7793 A <= 0? A : -A same as -abs (A)
7794 A < 0? A : -A same as -abs (A)
7795
7796 None of these transformations work for modes with signed
7797 zeros. If A is +/-0, the first two transformations will
7798 change the sign of the result (from +0 to -0, or vice
7799 versa). The last four will fix the sign of the result,
7800 even though the original expressions could be positive or
7801 negative, depending on the sign of A.
7802
7803 Note that all these transformations are correct if A is
7804 NaN, since the two alternatives (A and -A) are also NaNs. */
7805 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
7806 ? real_zerop (TREE_OPERAND (arg0, 1))
7807 : integer_zerop (TREE_OPERAND (arg0, 1)))
7808 && TREE_CODE (arg2) == NEGATE_EXPR
7809 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
7810 switch (comp_code)
7811 {
7812 case EQ_EXPR:
7813 return
7814 pedantic_non_lvalue
7815 (convert (type,
7816 negate_expr
7817 (convert (TREE_TYPE (TREE_OPERAND (t, 1)),
7818 arg1))));
7819 case NE_EXPR:
7820 return pedantic_non_lvalue (convert (type, arg1));
7821 case GE_EXPR:
7822 case GT_EXPR:
7823 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7824 arg1 = convert ((*lang_hooks.types.signed_type)
7825 (TREE_TYPE (arg1)), arg1);
7826 return pedantic_non_lvalue
7827 (convert (type, fold (build1 (ABS_EXPR,
7828 TREE_TYPE (arg1), arg1))));
7829 case LE_EXPR:
7830 case LT_EXPR:
7831 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7832 arg1 = convert ((lang_hooks.types.signed_type)
7833 (TREE_TYPE (arg1)), arg1);
7834 return pedantic_non_lvalue
7835 (negate_expr (convert (type,
7836 fold (build1 (ABS_EXPR,
7837 TREE_TYPE (arg1),
7838 arg1)))));
7839 default:
7840 abort ();
7841 }
7842
7843 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
7844 A == 0 ? A : 0 is always 0 unless A is -0. Note that
7845 both transformations are correct when A is NaN: A != 0
7846 is then true, and A == 0 is false. */
7847
7848 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
7849 {
7850 if (comp_code == NE_EXPR)
7851 return pedantic_non_lvalue (convert (type, arg1));
7852 else if (comp_code == EQ_EXPR)
7853 return pedantic_non_lvalue (convert (type, integer_zero_node));
7854 }
7855
7856 /* Try some transformations of A op B ? A : B.
7857
7858 A == B? A : B same as B
7859 A != B? A : B same as A
7860 A >= B? A : B same as max (A, B)
7861 A > B? A : B same as max (B, A)
7862 A <= B? A : B same as min (A, B)
7863 A < B? A : B same as min (B, A)
7864
7865 As above, these transformations don't work in the presence
7866 of signed zeros. For example, if A and B are zeros of
7867 opposite sign, the first two transformations will change
7868 the sign of the result. In the last four, the original
7869 expressions give different results for (A=+0, B=-0) and
7870 (A=-0, B=+0), but the transformed expressions do not.
7871
7872 The first two transformations are correct if either A or B
7873 is a NaN. In the first transformation, the condition will
7874 be false, and B will indeed be chosen. In the case of the
7875 second transformation, the condition A != B will be true,
7876 and A will be chosen.
7877
7878 The conversions to max() and min() are not correct if B is
7879 a number and A is not. The conditions in the original
7880 expressions will be false, so all four give B. The min()
7881 and max() versions would give a NaN instead. */
7882 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
7883 arg2, TREE_OPERAND (arg0, 0)))
7884 {
7885 tree comp_op0 = TREE_OPERAND (arg0, 0);
7886 tree comp_op1 = TREE_OPERAND (arg0, 1);
7887 tree comp_type = TREE_TYPE (comp_op0);
7888
7889 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
7890 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
7891 {
7892 comp_type = type;
7893 comp_op0 = arg1;
7894 comp_op1 = arg2;
7895 }
7896
7897 switch (comp_code)
7898 {
7899 case EQ_EXPR:
7900 return pedantic_non_lvalue (convert (type, arg2));
7901 case NE_EXPR:
7902 return pedantic_non_lvalue (convert (type, arg1));
7903 case LE_EXPR:
7904 case LT_EXPR:
7905 /* In C++ a ?: expression can be an lvalue, so put the
7906 operand which will be used if they are equal first
7907 so that we can convert this back to the
7908 corresponding COND_EXPR. */
7909 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
7910 return pedantic_non_lvalue
7911 (convert (type, fold (build (MIN_EXPR, comp_type,
7912 (comp_code == LE_EXPR
7913 ? comp_op0 : comp_op1),
7914 (comp_code == LE_EXPR
7915 ? comp_op1 : comp_op0)))));
7916 break;
7917 case GE_EXPR:
7918 case GT_EXPR:
7919 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
7920 return pedantic_non_lvalue
7921 (convert (type, fold (build (MAX_EXPR, comp_type,
7922 (comp_code == GE_EXPR
7923 ? comp_op0 : comp_op1),
7924 (comp_code == GE_EXPR
7925 ? comp_op1 : comp_op0)))));
7926 break;
7927 default:
7928 abort ();
7929 }
7930 }
7931
7932 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
7933 we might still be able to simplify this. For example,
7934 if C1 is one less or one more than C2, this might have started
7935 out as a MIN or MAX and been transformed by this function.
7936 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
7937
7938 if (INTEGRAL_TYPE_P (type)
7939 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7940 && TREE_CODE (arg2) == INTEGER_CST)
7941 switch (comp_code)
7942 {
7943 case EQ_EXPR:
7944 /* We can replace A with C1 in this case. */
7945 arg1 = convert (type, TREE_OPERAND (arg0, 1));
7946 return fold (build (code, type, TREE_OPERAND (t, 0), arg1,
7947 TREE_OPERAND (t, 2)));
7948
7949 case LT_EXPR:
7950 /* If C1 is C2 + 1, this is min(A, C2). */
7951 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
7952 && operand_equal_p (TREE_OPERAND (arg0, 1),
7953 const_binop (PLUS_EXPR, arg2,
7954 integer_one_node, 0), 1))
7955 return pedantic_non_lvalue
7956 (fold (build (MIN_EXPR, type, arg1, arg2)));
7957 break;
7958
7959 case LE_EXPR:
7960 /* If C1 is C2 - 1, this is min(A, C2). */
7961 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
7962 && operand_equal_p (TREE_OPERAND (arg0, 1),
7963 const_binop (MINUS_EXPR, arg2,
7964 integer_one_node, 0), 1))
7965 return pedantic_non_lvalue
7966 (fold (build (MIN_EXPR, type, arg1, arg2)));
7967 break;
7968
7969 case GT_EXPR:
7970 /* If C1 is C2 - 1, this is max(A, C2). */
7971 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
7972 && operand_equal_p (TREE_OPERAND (arg0, 1),
7973 const_binop (MINUS_EXPR, arg2,
7974 integer_one_node, 0), 1))
7975 return pedantic_non_lvalue
7976 (fold (build (MAX_EXPR, type, arg1, arg2)));
7977 break;
7978
7979 case GE_EXPR:
7980 /* If C1 is C2 + 1, this is max(A, C2). */
7981 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
7982 && operand_equal_p (TREE_OPERAND (arg0, 1),
7983 const_binop (PLUS_EXPR, arg2,
7984 integer_one_node, 0), 1))
7985 return pedantic_non_lvalue
7986 (fold (build (MAX_EXPR, type, arg1, arg2)));
7987 break;
7988 case NE_EXPR:
7989 break;
7990 default:
7991 abort ();
7992 }
7993 }
7994
7995 /* If the second operand is simpler than the third, swap them
7996 since that produces better jump optimization results. */
7997 if (tree_swap_operands_p (TREE_OPERAND (t, 1), TREE_OPERAND (t, 2)))
7998 {
7999 /* See if this can be inverted. If it can't, possibly because
8000 it was a floating-point inequality comparison, don't do
8001 anything. */
8002 tem = invert_truthvalue (arg0);
8003
8004 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8005 return fold (build (code, type, tem,
8006 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
8007 }
8008
8009 /* Convert A ? 1 : 0 to simply A. */
8010 if (integer_onep (TREE_OPERAND (t, 1))
8011 && integer_zerop (TREE_OPERAND (t, 2))
8012 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
8013 call to fold will try to move the conversion inside
8014 a COND, which will recurse. In that case, the COND_EXPR
8015 is probably the best choice, so leave it alone. */
8016 && type == TREE_TYPE (arg0))
8017 return pedantic_non_lvalue (arg0);
8018
8019 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
8020 over COND_EXPR in cases such as floating point comparisons. */
8021 if (integer_zerop (TREE_OPERAND (t, 1))
8022 && integer_onep (TREE_OPERAND (t, 2))
8023 && truth_value_p (TREE_CODE (arg0)))
8024 return pedantic_non_lvalue (convert (type,
8025 invert_truthvalue (arg0)));
8026
8027 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
8028 operation is simply A & 2. */
8029
8030 if (integer_zerop (TREE_OPERAND (t, 2))
8031 && TREE_CODE (arg0) == NE_EXPR
8032 && integer_zerop (TREE_OPERAND (arg0, 1))
8033 && integer_pow2p (arg1)
8034 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
8035 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
8036 arg1, 1))
8037 return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));
8038
8039 /* Convert A ? B : 0 into A && B if A and B are truth values. */
8040 if (integer_zerop (TREE_OPERAND (t, 2))
8041 && truth_value_p (TREE_CODE (arg0))
8042 && truth_value_p (TREE_CODE (arg1)))
8043 return pedantic_non_lvalue (fold (build (TRUTH_ANDIF_EXPR, type,
8044 arg0, arg1)));
8045
8046 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
8047 if (integer_onep (TREE_OPERAND (t, 2))
8048 && truth_value_p (TREE_CODE (arg0))
8049 && truth_value_p (TREE_CODE (arg1)))
8050 {
8051 /* Only perform transformation if ARG0 is easily inverted. */
8052 tem = invert_truthvalue (arg0);
8053 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8054 return pedantic_non_lvalue (fold (build (TRUTH_ORIF_EXPR, type,
8055 tem, arg1)));
8056 }
8057
8058 return t;
8059
8060 case COMPOUND_EXPR:
8061 /* When pedantic, a compound expression can be neither an lvalue
8062 nor an integer constant expression. */
8063 if (TREE_SIDE_EFFECTS (arg0) || pedantic)
8064 return t;
8065 /* Don't let (0, 0) be null pointer constant. */
8066 if (integer_zerop (arg1))
8067 return build1 (NOP_EXPR, type, arg1);
8068 return convert (type, arg1);
8069
8070 case COMPLEX_EXPR:
8071 if (wins)
8072 return build_complex (type, arg0, arg1);
8073 return t;
8074
8075 case REALPART_EXPR:
8076 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8077 return t;
8078 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8079 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8080 TREE_OPERAND (arg0, 1));
8081 else if (TREE_CODE (arg0) == COMPLEX_CST)
8082 return TREE_REALPART (arg0);
8083 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8084 return fold (build (TREE_CODE (arg0), type,
8085 fold (build1 (REALPART_EXPR, type,
8086 TREE_OPERAND (arg0, 0))),
8087 fold (build1 (REALPART_EXPR,
8088 type, TREE_OPERAND (arg0, 1)))));
8089 return t;
8090
8091 case IMAGPART_EXPR:
8092 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8093 return convert (type, integer_zero_node);
8094 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8095 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8096 TREE_OPERAND (arg0, 0));
8097 else if (TREE_CODE (arg0) == COMPLEX_CST)
8098 return TREE_IMAGPART (arg0);
8099 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8100 return fold (build (TREE_CODE (arg0), type,
8101 fold (build1 (IMAGPART_EXPR, type,
8102 TREE_OPERAND (arg0, 0))),
8103 fold (build1 (IMAGPART_EXPR, type,
8104 TREE_OPERAND (arg0, 1)))));
8105 return t;
8106
8107 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
8108 appropriate. */
8109 case CLEANUP_POINT_EXPR:
8110 if (! has_cleanups (arg0))
8111 return TREE_OPERAND (t, 0);
8112
8113 {
8114 enum tree_code code0 = TREE_CODE (arg0);
8115 int kind0 = TREE_CODE_CLASS (code0);
8116 tree arg00 = TREE_OPERAND (arg0, 0);
8117 tree arg01;
8118
8119 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
8120 return fold (build1 (code0, type,
8121 fold (build1 (CLEANUP_POINT_EXPR,
8122 TREE_TYPE (arg00), arg00))));
8123
8124 if (kind0 == '<' || kind0 == '2'
8125 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
8126 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
8127 || code0 == TRUTH_XOR_EXPR)
8128 {
8129 arg01 = TREE_OPERAND (arg0, 1);
8130
8131 if (TREE_CONSTANT (arg00)
8132 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
8133 && ! has_cleanups (arg00)))
8134 return fold (build (code0, type, arg00,
8135 fold (build1 (CLEANUP_POINT_EXPR,
8136 TREE_TYPE (arg01), arg01))));
8137
8138 if (TREE_CONSTANT (arg01))
8139 return fold (build (code0, type,
8140 fold (build1 (CLEANUP_POINT_EXPR,
8141 TREE_TYPE (arg00), arg00)),
8142 arg01));
8143 }
8144
8145 return t;
8146 }
8147
8148 case CALL_EXPR:
8149 /* Check for a built-in function. */
8150 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
8151 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (expr, 0), 0))
8152 == FUNCTION_DECL)
8153 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
8154 {
8155 tree tmp = fold_builtin (expr);
8156 if (tmp)
8157 return tmp;
8158 }
8159 return t;
8160
8161 default:
8162 return t;
8163 } /* switch (code) */
8164 }
8165
8166 #ifdef ENABLE_FOLD_CHECKING
8167 #undef fold
8168
8169 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
8170 static void fold_check_failed (tree, tree);
8171 void print_fold_checksum (tree);
8172
8173 /* When --enable-checking=fold, compute a digest of expr before
8174 and after actual fold call to see if fold did not accidentally
8175 change original expr. */
8176
8177 tree
8178 fold (tree expr)
8179 {
8180 tree ret;
8181 struct md5_ctx ctx;
8182 unsigned char checksum_before[16], checksum_after[16];
8183 htab_t ht;
8184
8185 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8186 md5_init_ctx (&ctx);
8187 fold_checksum_tree (expr, &ctx, ht);
8188 md5_finish_ctx (&ctx, checksum_before);
8189 htab_empty (ht);
8190
8191 ret = fold_1 (expr);
8192
8193 md5_init_ctx (&ctx);
8194 fold_checksum_tree (expr, &ctx, ht);
8195 md5_finish_ctx (&ctx, checksum_after);
8196 htab_delete (ht);
8197
8198 if (memcmp (checksum_before, checksum_after, 16))
8199 fold_check_failed (expr, ret);
8200
8201 return ret;
8202 }
8203
8204 void
8205 print_fold_checksum (tree expr)
8206 {
8207 struct md5_ctx ctx;
8208 unsigned char checksum[16], cnt;
8209 htab_t ht;
8210
8211 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8212 md5_init_ctx (&ctx);
8213 fold_checksum_tree (expr, &ctx, ht);
8214 md5_finish_ctx (&ctx, checksum);
8215 htab_delete (ht);
8216 for (cnt = 0; cnt < 16; ++cnt)
8217 fprintf (stderr, "%02x", checksum[cnt]);
8218 putc ('\n', stderr);
8219 }
8220
8221 static void
8222 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
8223 {
8224 internal_error ("fold check: original tree changed by fold");
8225 }
8226
8227 static void
8228 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
8229 {
8230 void **slot;
8231 enum tree_code code;
8232 char buf[sizeof (struct tree_decl)];
8233 int i, len;
8234
8235 if (sizeof (struct tree_exp) + 5 * sizeof (tree)
8236 > sizeof (struct tree_decl)
8237 || sizeof (struct tree_type) > sizeof (struct tree_decl))
8238 abort ();
8239 if (expr == NULL)
8240 return;
8241 slot = htab_find_slot (ht, expr, INSERT);
8242 if (*slot != NULL)
8243 return;
8244 *slot = expr;
8245 code = TREE_CODE (expr);
8246 if (code == SAVE_EXPR && SAVE_EXPR_NOPLACEHOLDER (expr))
8247 {
8248 /* Allow SAVE_EXPR_NOPLACEHOLDER flag to be modified. */
8249 memcpy (buf, expr, tree_size (expr));
8250 expr = (tree) buf;
8251 SAVE_EXPR_NOPLACEHOLDER (expr) = 0;
8252 }
8253 else if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
8254 {
8255 /* Allow DECL_ASSEMBLER_NAME to be modified. */
8256 memcpy (buf, expr, tree_size (expr));
8257 expr = (tree) buf;
8258 SET_DECL_ASSEMBLER_NAME (expr, NULL);
8259 }
8260 else if (TREE_CODE_CLASS (code) == 't'
8261 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
8262 {
8263 /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified. */
8264 memcpy (buf, expr, tree_size (expr));
8265 expr = (tree) buf;
8266 TYPE_POINTER_TO (expr) = NULL;
8267 TYPE_REFERENCE_TO (expr) = NULL;
8268 }
8269 md5_process_bytes (expr, tree_size (expr), ctx);
8270 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
8271 if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
8272 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
8273 len = TREE_CODE_LENGTH (code);
8274 switch (TREE_CODE_CLASS (code))
8275 {
8276 case 'c':
8277 switch (code)
8278 {
8279 case STRING_CST:
8280 md5_process_bytes (TREE_STRING_POINTER (expr),
8281 TREE_STRING_LENGTH (expr), ctx);
8282 break;
8283 case COMPLEX_CST:
8284 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
8285 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
8286 break;
8287 case VECTOR_CST:
8288 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
8289 break;
8290 default:
8291 break;
8292 }
8293 break;
8294 case 'x':
8295 switch (code)
8296 {
8297 case TREE_LIST:
8298 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
8299 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
8300 break;
8301 case TREE_VEC:
8302 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
8303 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
8304 break;
8305 default:
8306 break;
8307 }
8308 break;
8309 case 'e':
8310 switch (code)
8311 {
8312 case SAVE_EXPR: len = 2; break;
8313 case GOTO_SUBROUTINE_EXPR: len = 0; break;
8314 case RTL_EXPR: len = 0; break;
8315 case WITH_CLEANUP_EXPR: len = 2; break;
8316 default: break;
8317 }
8318 /* FALLTHROUGH */
8319 case 'r':
8320 case '<':
8321 case '1':
8322 case '2':
8323 case 's':
8324 for (i = 0; i < len; ++i)
8325 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
8326 break;
8327 case 'd':
8328 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
8329 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
8330 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
8331 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
8332 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
8333 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
8334 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
8335 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
8336 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
8337 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
8338 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
8339 break;
8340 case 't':
8341 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
8342 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
8343 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
8344 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
8345 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
8346 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
8347 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
8348 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
8349 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
8350 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
8351 break;
8352 default:
8353 break;
8354 }
8355 }
8356
8357 #endif
8358
8359 /* Perform constant folding and related simplification of initializer
8360 expression EXPR. This behaves identically to "fold" but ignores
8361 potential run-time traps and exceptions that fold must preserve. */
8362
8363 tree
8364 fold_initializer (tree expr)
8365 {
8366 int saved_signaling_nans = flag_signaling_nans;
8367 int saved_trapping_math = flag_trapping_math;
8368 int saved_trapv = flag_trapv;
8369 tree result;
8370
8371 flag_signaling_nans = 0;
8372 flag_trapping_math = 0;
8373 flag_trapv = 0;
8374
8375 result = fold (expr);
8376
8377 flag_signaling_nans = saved_signaling_nans;
8378 flag_trapping_math = saved_trapping_math;
8379 flag_trapv = saved_trapv;
8380
8381 return result;
8382 }
8383
8384 /* Determine if first argument is a multiple of second argument. Return 0 if
8385 it is not, or we cannot easily determined it to be.
8386
8387 An example of the sort of thing we care about (at this point; this routine
8388 could surely be made more general, and expanded to do what the *_DIV_EXPR's
8389 fold cases do now) is discovering that
8390
8391 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8392
8393 is a multiple of
8394
8395 SAVE_EXPR (J * 8)
8396
8397 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
8398
8399 This code also handles discovering that
8400
8401 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8402
8403 is a multiple of 8 so we don't have to worry about dealing with a
8404 possible remainder.
8405
8406 Note that we *look* inside a SAVE_EXPR only to determine how it was
8407 calculated; it is not safe for fold to do much of anything else with the
8408 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
8409 at run time. For example, the latter example above *cannot* be implemented
8410 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
8411 evaluation time of the original SAVE_EXPR is not necessarily the same at
8412 the time the new expression is evaluated. The only optimization of this
8413 sort that would be valid is changing
8414
8415 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
8416
8417 divided by 8 to
8418
8419 SAVE_EXPR (I) * SAVE_EXPR (J)
8420
8421 (where the same SAVE_EXPR (J) is used in the original and the
8422 transformed version). */
8423
8424 static int
8425 multiple_of_p (tree type, tree top, tree bottom)
8426 {
8427 if (operand_equal_p (top, bottom, 0))
8428 return 1;
8429
8430 if (TREE_CODE (type) != INTEGER_TYPE)
8431 return 0;
8432
8433 switch (TREE_CODE (top))
8434 {
8435 case MULT_EXPR:
8436 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8437 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8438
8439 case PLUS_EXPR:
8440 case MINUS_EXPR:
8441 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8442 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8443
8444 case LSHIFT_EXPR:
8445 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
8446 {
8447 tree op1, t1;
8448
8449 op1 = TREE_OPERAND (top, 1);
8450 /* const_binop may not detect overflow correctly,
8451 so check for it explicitly here. */
8452 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
8453 > TREE_INT_CST_LOW (op1)
8454 && TREE_INT_CST_HIGH (op1) == 0
8455 && 0 != (t1 = convert (type,
8456 const_binop (LSHIFT_EXPR, size_one_node,
8457 op1, 0)))
8458 && ! TREE_OVERFLOW (t1))
8459 return multiple_of_p (type, t1, bottom);
8460 }
8461 return 0;
8462
8463 case NOP_EXPR:
8464 /* Can't handle conversions from non-integral or wider integral type. */
8465 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
8466 || (TYPE_PRECISION (type)
8467 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
8468 return 0;
8469
8470 /* .. fall through ... */
8471
8472 case SAVE_EXPR:
8473 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
8474
8475 case INTEGER_CST:
8476 if (TREE_CODE (bottom) != INTEGER_CST
8477 || (TREE_UNSIGNED (type)
8478 && (tree_int_cst_sgn (top) < 0
8479 || tree_int_cst_sgn (bottom) < 0)))
8480 return 0;
8481 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
8482 top, bottom, 0));
8483
8484 default:
8485 return 0;
8486 }
8487 }
8488
8489 /* Return true if `t' is known to be non-negative. */
8490
8491 int
8492 tree_expr_nonnegative_p (tree t)
8493 {
8494 switch (TREE_CODE (t))
8495 {
8496 case ABS_EXPR:
8497 return 1;
8498
8499 case INTEGER_CST:
8500 return tree_int_cst_sgn (t) >= 0;
8501
8502 case REAL_CST:
8503 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
8504
8505 case PLUS_EXPR:
8506 if (FLOAT_TYPE_P (TREE_TYPE (t)))
8507 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8508 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8509
8510 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
8511 both unsigned and at least 2 bits shorter than the result. */
8512 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8513 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8514 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8515 {
8516 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8517 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8518 if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
8519 && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
8520 {
8521 unsigned int prec = MAX (TYPE_PRECISION (inner1),
8522 TYPE_PRECISION (inner2)) + 1;
8523 return prec < TYPE_PRECISION (TREE_TYPE (t));
8524 }
8525 }
8526 break;
8527
8528 case MULT_EXPR:
8529 if (FLOAT_TYPE_P (TREE_TYPE (t)))
8530 {
8531 /* x * x for floating point x is always non-negative. */
8532 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
8533 return 1;
8534 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8535 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8536 }
8537
8538 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
8539 both unsigned and their total bits is shorter than the result. */
8540 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8541 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8542 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8543 {
8544 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8545 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8546 if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
8547 && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
8548 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
8549 < TYPE_PRECISION (TREE_TYPE (t));
8550 }
8551 return 0;
8552
8553 case TRUNC_DIV_EXPR:
8554 case CEIL_DIV_EXPR:
8555 case FLOOR_DIV_EXPR:
8556 case ROUND_DIV_EXPR:
8557 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8558 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8559
8560 case TRUNC_MOD_EXPR:
8561 case CEIL_MOD_EXPR:
8562 case FLOOR_MOD_EXPR:
8563 case ROUND_MOD_EXPR:
8564 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8565
8566 case RDIV_EXPR:
8567 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8568 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8569
8570 case NOP_EXPR:
8571 {
8572 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
8573 tree outer_type = TREE_TYPE (t);
8574
8575 if (TREE_CODE (outer_type) == REAL_TYPE)
8576 {
8577 if (TREE_CODE (inner_type) == REAL_TYPE)
8578 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8579 if (TREE_CODE (inner_type) == INTEGER_TYPE)
8580 {
8581 if (TREE_UNSIGNED (inner_type))
8582 return 1;
8583 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8584 }
8585 }
8586 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
8587 {
8588 if (TREE_CODE (inner_type) == REAL_TYPE)
8589 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
8590 if (TREE_CODE (inner_type) == INTEGER_TYPE)
8591 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
8592 && TREE_UNSIGNED (inner_type);
8593 }
8594 }
8595 break;
8596
8597 case COND_EXPR:
8598 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
8599 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
8600 case COMPOUND_EXPR:
8601 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8602 case MIN_EXPR:
8603 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8604 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8605 case MAX_EXPR:
8606 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8607 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8608 case MODIFY_EXPR:
8609 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8610 case BIND_EXPR:
8611 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8612 case SAVE_EXPR:
8613 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8614 case NON_LVALUE_EXPR:
8615 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8616 case FLOAT_EXPR:
8617 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8618 case RTL_EXPR:
8619 return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));
8620
8621 case CALL_EXPR:
8622 {
8623 tree fndecl = get_callee_fndecl (t);
8624 tree arglist = TREE_OPERAND (t, 1);
8625 if (fndecl
8626 && DECL_BUILT_IN (fndecl)
8627 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
8628 switch (DECL_FUNCTION_CODE (fndecl))
8629 {
8630 case BUILT_IN_CABS:
8631 case BUILT_IN_CABSL:
8632 case BUILT_IN_CABSF:
8633 case BUILT_IN_EXP:
8634 case BUILT_IN_EXPF:
8635 case BUILT_IN_EXPL:
8636 case BUILT_IN_EXP2:
8637 case BUILT_IN_EXP2F:
8638 case BUILT_IN_EXP2L:
8639 case BUILT_IN_EXP10:
8640 case BUILT_IN_EXP10F:
8641 case BUILT_IN_EXP10L:
8642 case BUILT_IN_FABS:
8643 case BUILT_IN_FABSF:
8644 case BUILT_IN_FABSL:
8645 case BUILT_IN_FFS:
8646 case BUILT_IN_FFSL:
8647 case BUILT_IN_FFSLL:
8648 case BUILT_IN_PARITY:
8649 case BUILT_IN_PARITYL:
8650 case BUILT_IN_PARITYLL:
8651 case BUILT_IN_POPCOUNT:
8652 case BUILT_IN_POPCOUNTL:
8653 case BUILT_IN_POPCOUNTLL:
8654 case BUILT_IN_POW10:
8655 case BUILT_IN_POW10F:
8656 case BUILT_IN_POW10L:
8657 case BUILT_IN_SQRT:
8658 case BUILT_IN_SQRTF:
8659 case BUILT_IN_SQRTL:
8660 return 1;
8661
8662 case BUILT_IN_ATAN:
8663 case BUILT_IN_ATANF:
8664 case BUILT_IN_ATANL:
8665 case BUILT_IN_CEIL:
8666 case BUILT_IN_CEILF:
8667 case BUILT_IN_CEILL:
8668 case BUILT_IN_FLOOR:
8669 case BUILT_IN_FLOORF:
8670 case BUILT_IN_FLOORL:
8671 case BUILT_IN_NEARBYINT:
8672 case BUILT_IN_NEARBYINTF:
8673 case BUILT_IN_NEARBYINTL:
8674 case BUILT_IN_ROUND:
8675 case BUILT_IN_ROUNDF:
8676 case BUILT_IN_ROUNDL:
8677 case BUILT_IN_TRUNC:
8678 case BUILT_IN_TRUNCF:
8679 case BUILT_IN_TRUNCL:
8680 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
8681
8682 case BUILT_IN_POW:
8683 case BUILT_IN_POWF:
8684 case BUILT_IN_POWL:
8685 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
8686
8687 default:
8688 break;
8689 }
8690 }
8691
8692 /* ... fall through ... */
8693
8694 default:
8695 if (truth_value_p (TREE_CODE (t)))
8696 /* Truth values evaluate to 0 or 1, which is nonnegative. */
8697 return 1;
8698 }
8699
8700 /* We don't know sign of `t', so be conservative and return false. */
8701 return 0;
8702 }
8703
8704 /* Return true if `r' is known to be non-negative.
8705 Only handles constants at the moment. */
8706
8707 int
8708 rtl_expr_nonnegative_p (rtx r)
8709 {
8710 switch (GET_CODE (r))
8711 {
8712 case CONST_INT:
8713 return INTVAL (r) >= 0;
8714
8715 case CONST_DOUBLE:
8716 if (GET_MODE (r) == VOIDmode)
8717 return CONST_DOUBLE_HIGH (r) >= 0;
8718 return 0;
8719
8720 case CONST_VECTOR:
8721 {
8722 int units, i;
8723 rtx elt;
8724
8725 units = CONST_VECTOR_NUNITS (r);
8726
8727 for (i = 0; i < units; ++i)
8728 {
8729 elt = CONST_VECTOR_ELT (r, i);
8730 if (!rtl_expr_nonnegative_p (elt))
8731 return 0;
8732 }
8733
8734 return 1;
8735 }
8736
8737 case SYMBOL_REF:
8738 case LABEL_REF:
8739 /* These are always nonnegative. */
8740 return 1;
8741
8742 default:
8743 return 0;
8744 }
8745 }
8746
8747 #include "gt-fold-const.h"