fold-const.c: Follow spelling conventions.
[gcc.git] / gcc / fold-const.c
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
29
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type.
32
33 fold takes a tree as argument and returns a simplified tree.
34
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
38
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
41
42 force_fit_type takes a constant and prior overflow indicator, and
43 forces the value to fit the type. It returns an overflow indicator. */
44
45 #include "config.h"
46 #include "system.h"
47 #include "coretypes.h"
48 #include "tm.h"
49 #include "flags.h"
50 #include "tree.h"
51 #include "real.h"
52 #include "rtl.h"
53 #include "expr.h"
54 #include "tm_p.h"
55 #include "toplev.h"
56 #include "ggc.h"
57 #include "hashtab.h"
58 #include "langhooks.h"
59 #include "md5.h"
60
61 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
62 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
63 static bool negate_expr_p (tree);
64 static tree negate_expr (tree);
65 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
66 static tree associate_trees (tree, tree, enum tree_code, tree);
67 static tree int_const_binop (enum tree_code, tree, tree, int);
68 static tree const_binop (enum tree_code, tree, tree, int);
69 static hashval_t size_htab_hash (const void *);
70 static int size_htab_eq (const void *, const void *);
71 static tree fold_convert (tree, tree);
72 static enum tree_code invert_tree_comparison (enum tree_code);
73 static enum tree_code swap_tree_comparison (enum tree_code);
74 static int comparison_to_compcode (enum tree_code);
75 static enum tree_code compcode_to_comparison (int);
76 static int truth_value_p (enum tree_code);
77 static int operand_equal_for_comparison_p (tree, tree, tree);
78 static int twoval_comparison_p (tree, tree *, tree *, int *);
79 static tree eval_subst (tree, tree, tree, tree, tree);
80 static tree pedantic_omit_one_operand (tree, tree, tree);
81 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
82 static tree make_bit_field_ref (tree, tree, int, int, int);
83 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
84 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
85 enum machine_mode *, int *, int *,
86 tree *, tree *);
87 static int all_ones_mask_p (tree, int);
88 static tree sign_bit_p (tree, tree);
89 static int simple_operand_p (tree);
90 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
91 static tree make_range (tree, int *, tree *, tree *);
92 static tree build_range_check (tree, tree, int, tree, tree);
93 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
94 tree);
95 static tree fold_range_test (tree);
96 static tree unextend (tree, int, int, tree);
97 static tree fold_truthop (enum tree_code, tree, tree, tree);
98 static tree optimize_minmax_comparison (tree);
99 static tree extract_muldiv (tree, tree, enum tree_code, tree);
100 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
101 static tree strip_compound_expr (tree, tree);
102 static int multiple_of_p (tree, tree, tree);
103 static tree constant_boolean_node (int, tree);
104 static int count_cond (tree, int);
105 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
106 tree, int);
107 static bool fold_real_zero_addition_p (tree, tree, int);
108 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
109 tree, tree, tree);
110 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
111 static bool tree_swap_operands_p (tree, tree);
112
113 /* The following constants represent a bit based encoding of GCC's
114 comparison operators. This encoding simplifies transformations
115 on relational comparison operators, such as AND and OR. */
116 #define COMPCODE_FALSE 0
117 #define COMPCODE_LT 1
118 #define COMPCODE_EQ 2
119 #define COMPCODE_LE 3
120 #define COMPCODE_GT 4
121 #define COMPCODE_NE 5
122 #define COMPCODE_GE 6
123 #define COMPCODE_TRUE 7
124
125 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
126 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
127 and SUM1. Then this yields nonzero if overflow occurred during the
128 addition.
129
130 Overflow occurs if A and B have the same sign, but A and SUM differ in
131 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
132 sign. */
133 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
134 \f
135 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
136 We do that by representing the two-word integer in 4 words, with only
137 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
138 number. The value of the word is LOWPART + HIGHPART * BASE. */
139
140 #define LOWPART(x) \
141 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
142 #define HIGHPART(x) \
143 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
144 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
145
146 /* Unpack a two-word integer into 4 words.
147 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
148 WORDS points to the array of HOST_WIDE_INTs. */
149
150 static void
151 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
152 {
153 words[0] = LOWPART (low);
154 words[1] = HIGHPART (low);
155 words[2] = LOWPART (hi);
156 words[3] = HIGHPART (hi);
157 }
158
159 /* Pack an array of 4 words into a two-word integer.
160 WORDS points to the array of words.
161 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
162
163 static void
164 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
165 HOST_WIDE_INT *hi)
166 {
167 *low = words[0] + words[1] * BASE;
168 *hi = words[2] + words[3] * BASE;
169 }
170 \f
171 /* Make the integer constant T valid for its type by setting to 0 or 1 all
172 the bits in the constant that don't belong in the type.
173
174 Return 1 if a signed overflow occurs, 0 otherwise. If OVERFLOW is
175 nonzero, a signed overflow has already occurred in calculating T, so
176 propagate it. */
177
178 int
179 force_fit_type (tree t, int overflow)
180 {
181 unsigned HOST_WIDE_INT low;
182 HOST_WIDE_INT high;
183 unsigned int prec;
184
185 if (TREE_CODE (t) == REAL_CST)
186 {
187 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
188 Consider doing it via real_convert now. */
189 return overflow;
190 }
191
192 else if (TREE_CODE (t) != INTEGER_CST)
193 return overflow;
194
195 low = TREE_INT_CST_LOW (t);
196 high = TREE_INT_CST_HIGH (t);
197
198 if (POINTER_TYPE_P (TREE_TYPE (t))
199 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
200 prec = POINTER_SIZE;
201 else
202 prec = TYPE_PRECISION (TREE_TYPE (t));
203
204 /* First clear all bits that are beyond the type's precision. */
205
206 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
207 ;
208 else if (prec > HOST_BITS_PER_WIDE_INT)
209 TREE_INT_CST_HIGH (t)
210 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
211 else
212 {
213 TREE_INT_CST_HIGH (t) = 0;
214 if (prec < HOST_BITS_PER_WIDE_INT)
215 TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
216 }
217
218 /* Unsigned types do not suffer sign extension or overflow unless they
219 are a sizetype. */
220 if (TREE_UNSIGNED (TREE_TYPE (t))
221 && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
222 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
223 return overflow;
224
225 /* If the value's sign bit is set, extend the sign. */
226 if (prec != 2 * HOST_BITS_PER_WIDE_INT
227 && (prec > HOST_BITS_PER_WIDE_INT
228 ? 0 != (TREE_INT_CST_HIGH (t)
229 & ((HOST_WIDE_INT) 1
230 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
231 : 0 != (TREE_INT_CST_LOW (t)
232 & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
233 {
234 /* Value is negative:
235 set to 1 all the bits that are outside this type's precision. */
236 if (prec > HOST_BITS_PER_WIDE_INT)
237 TREE_INT_CST_HIGH (t)
238 |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
239 else
240 {
241 TREE_INT_CST_HIGH (t) = -1;
242 if (prec < HOST_BITS_PER_WIDE_INT)
243 TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
244 }
245 }
246
247 /* Return nonzero if signed overflow occurred. */
248 return
249 ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
250 != 0);
251 }
252 \f
253 /* Add two doubleword integers with doubleword result.
254 Each argument is given as two `HOST_WIDE_INT' pieces.
255 One argument is L1 and H1; the other, L2 and H2.
256 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
257
258 int
259 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
260 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
261 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
262 {
263 unsigned HOST_WIDE_INT l;
264 HOST_WIDE_INT h;
265
266 l = l1 + l2;
267 h = h1 + h2 + (l < l1);
268
269 *lv = l;
270 *hv = h;
271 return OVERFLOW_SUM_SIGN (h1, h2, h);
272 }
273
274 /* Negate a doubleword integer with doubleword result.
275 Return nonzero if the operation overflows, assuming it's signed.
276 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
277 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
278
279 int
280 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
281 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
282 {
283 if (l1 == 0)
284 {
285 *lv = 0;
286 *hv = - h1;
287 return (*hv & h1) < 0;
288 }
289 else
290 {
291 *lv = -l1;
292 *hv = ~h1;
293 return 0;
294 }
295 }
296 \f
297 /* Multiply two doubleword integers with doubleword result.
298 Return nonzero if the operation overflows, assuming it's signed.
299 Each argument is given as two `HOST_WIDE_INT' pieces.
300 One argument is L1 and H1; the other, L2 and H2.
301 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
302
303 int
304 mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
305 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
306 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
307 {
308 HOST_WIDE_INT arg1[4];
309 HOST_WIDE_INT arg2[4];
310 HOST_WIDE_INT prod[4 * 2];
311 unsigned HOST_WIDE_INT carry;
312 int i, j, k;
313 unsigned HOST_WIDE_INT toplow, neglow;
314 HOST_WIDE_INT tophigh, neghigh;
315
316 encode (arg1, l1, h1);
317 encode (arg2, l2, h2);
318
319 memset (prod, 0, sizeof prod);
320
321 for (i = 0; i < 4; i++)
322 {
323 carry = 0;
324 for (j = 0; j < 4; j++)
325 {
326 k = i + j;
327 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
328 carry += arg1[i] * arg2[j];
329 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
330 carry += prod[k];
331 prod[k] = LOWPART (carry);
332 carry = HIGHPART (carry);
333 }
334 prod[i + 4] = carry;
335 }
336
337 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
338
339 /* Check for overflow by calculating the top half of the answer in full;
340 it should agree with the low half's sign bit. */
341 decode (prod + 4, &toplow, &tophigh);
342 if (h1 < 0)
343 {
344 neg_double (l2, h2, &neglow, &neghigh);
345 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
346 }
347 if (h2 < 0)
348 {
349 neg_double (l1, h1, &neglow, &neghigh);
350 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
351 }
352 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
353 }
354 \f
355 /* Shift the doubleword integer in L1, H1 left by COUNT places
356 keeping only PREC bits of result.
357 Shift right if COUNT is negative.
358 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
359 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
360
361 void
362 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
363 HOST_WIDE_INT count, unsigned int prec,
364 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
365 {
366 unsigned HOST_WIDE_INT signmask;
367
368 if (count < 0)
369 {
370 rshift_double (l1, h1, -count, prec, lv, hv, arith);
371 return;
372 }
373
374 #ifdef SHIFT_COUNT_TRUNCATED
375 if (SHIFT_COUNT_TRUNCATED)
376 count %= prec;
377 #endif
378
379 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
380 {
381 /* Shifting by the host word size is undefined according to the
382 ANSI standard, so we must handle this as a special case. */
383 *hv = 0;
384 *lv = 0;
385 }
386 else if (count >= HOST_BITS_PER_WIDE_INT)
387 {
388 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
389 *lv = 0;
390 }
391 else
392 {
393 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
394 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
395 *lv = l1 << count;
396 }
397
398 /* Sign extend all bits that are beyond the precision. */
399
400 signmask = -((prec > HOST_BITS_PER_WIDE_INT
401 ? ((unsigned HOST_WIDE_INT) *hv
402 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
403 : (*lv >> (prec - 1))) & 1);
404
405 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
406 ;
407 else if (prec >= HOST_BITS_PER_WIDE_INT)
408 {
409 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
410 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
411 }
412 else
413 {
414 *hv = signmask;
415 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
416 *lv |= signmask << prec;
417 }
418 }
419
420 /* Shift the doubleword integer in L1, H1 right by COUNT places
421 keeping only PREC bits of result. COUNT must be positive.
422 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
423 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
424
425 void
426 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
427 HOST_WIDE_INT count, unsigned int prec,
428 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
429 int arith)
430 {
431 unsigned HOST_WIDE_INT signmask;
432
433 signmask = (arith
434 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
435 : 0);
436
437 #ifdef SHIFT_COUNT_TRUNCATED
438 if (SHIFT_COUNT_TRUNCATED)
439 count %= prec;
440 #endif
441
442 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
443 {
444 /* Shifting by the host word size is undefined according to the
445 ANSI standard, so we must handle this as a special case. */
446 *hv = 0;
447 *lv = 0;
448 }
449 else if (count >= HOST_BITS_PER_WIDE_INT)
450 {
451 *hv = 0;
452 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
453 }
454 else
455 {
456 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
457 *lv = ((l1 >> count)
458 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
459 }
460
461 /* Zero / sign extend all bits that are beyond the precision. */
462
463 if (count >= (HOST_WIDE_INT)prec)
464 {
465 *hv = signmask;
466 *lv = signmask;
467 }
468 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
469 ;
470 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
471 {
472 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
473 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
474 }
475 else
476 {
477 *hv = signmask;
478 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
479 *lv |= signmask << (prec - count);
480 }
481 }
482 \f
483 /* Rotate the doubleword integer in L1, H1 left by COUNT places
484 keeping only PREC bits of result.
485 Rotate right if COUNT is negative.
486 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
487
488 void
489 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
490 HOST_WIDE_INT count, unsigned int prec,
491 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
492 {
493 unsigned HOST_WIDE_INT s1l, s2l;
494 HOST_WIDE_INT s1h, s2h;
495
496 count %= prec;
497 if (count < 0)
498 count += prec;
499
500 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
501 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
502 *lv = s1l | s2l;
503 *hv = s1h | s2h;
504 }
505
506 /* Rotate the doubleword integer in L1, H1 left by COUNT places
507 keeping only PREC bits of result. COUNT must be positive.
508 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
509
510 void
511 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
512 HOST_WIDE_INT count, unsigned int prec,
513 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
514 {
515 unsigned HOST_WIDE_INT s1l, s2l;
516 HOST_WIDE_INT s1h, s2h;
517
518 count %= prec;
519 if (count < 0)
520 count += prec;
521
522 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
523 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
524 *lv = s1l | s2l;
525 *hv = s1h | s2h;
526 }
527 \f
528 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
529 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
530 CODE is a tree code for a kind of division, one of
531 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
532 or EXACT_DIV_EXPR
533 It controls how the quotient is rounded to an integer.
534 Return nonzero if the operation overflows.
535 UNS nonzero says do unsigned division. */
536
537 int
538 div_and_round_double (enum tree_code code, int uns,
539 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
540 HOST_WIDE_INT hnum_orig,
541 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
542 HOST_WIDE_INT hden_orig,
543 unsigned HOST_WIDE_INT *lquo,
544 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
545 HOST_WIDE_INT *hrem)
546 {
547 int quo_neg = 0;
548 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
549 HOST_WIDE_INT den[4], quo[4];
550 int i, j;
551 unsigned HOST_WIDE_INT work;
552 unsigned HOST_WIDE_INT carry = 0;
553 unsigned HOST_WIDE_INT lnum = lnum_orig;
554 HOST_WIDE_INT hnum = hnum_orig;
555 unsigned HOST_WIDE_INT lden = lden_orig;
556 HOST_WIDE_INT hden = hden_orig;
557 int overflow = 0;
558
559 if (hden == 0 && lden == 0)
560 overflow = 1, lden = 1;
561
562 /* Calculate quotient sign and convert operands to unsigned. */
563 if (!uns)
564 {
565 if (hnum < 0)
566 {
567 quo_neg = ~ quo_neg;
568 /* (minimum integer) / (-1) is the only overflow case. */
569 if (neg_double (lnum, hnum, &lnum, &hnum)
570 && ((HOST_WIDE_INT) lden & hden) == -1)
571 overflow = 1;
572 }
573 if (hden < 0)
574 {
575 quo_neg = ~ quo_neg;
576 neg_double (lden, hden, &lden, &hden);
577 }
578 }
579
580 if (hnum == 0 && hden == 0)
581 { /* single precision */
582 *hquo = *hrem = 0;
583 /* This unsigned division rounds toward zero. */
584 *lquo = lnum / lden;
585 goto finish_up;
586 }
587
588 if (hnum == 0)
589 { /* trivial case: dividend < divisor */
590 /* hden != 0 already checked. */
591 *hquo = *lquo = 0;
592 *hrem = hnum;
593 *lrem = lnum;
594 goto finish_up;
595 }
596
597 memset (quo, 0, sizeof quo);
598
599 memset (num, 0, sizeof num); /* to zero 9th element */
600 memset (den, 0, sizeof den);
601
602 encode (num, lnum, hnum);
603 encode (den, lden, hden);
604
605 /* Special code for when the divisor < BASE. */
606 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
607 {
608 /* hnum != 0 already checked. */
609 for (i = 4 - 1; i >= 0; i--)
610 {
611 work = num[i] + carry * BASE;
612 quo[i] = work / lden;
613 carry = work % lden;
614 }
615 }
616 else
617 {
618 /* Full double precision division,
619 with thanks to Don Knuth's "Seminumerical Algorithms". */
620 int num_hi_sig, den_hi_sig;
621 unsigned HOST_WIDE_INT quo_est, scale;
622
623 /* Find the highest nonzero divisor digit. */
624 for (i = 4 - 1;; i--)
625 if (den[i] != 0)
626 {
627 den_hi_sig = i;
628 break;
629 }
630
631 /* Insure that the first digit of the divisor is at least BASE/2.
632 This is required by the quotient digit estimation algorithm. */
633
634 scale = BASE / (den[den_hi_sig] + 1);
635 if (scale > 1)
636 { /* scale divisor and dividend */
637 carry = 0;
638 for (i = 0; i <= 4 - 1; i++)
639 {
640 work = (num[i] * scale) + carry;
641 num[i] = LOWPART (work);
642 carry = HIGHPART (work);
643 }
644
645 num[4] = carry;
646 carry = 0;
647 for (i = 0; i <= 4 - 1; i++)
648 {
649 work = (den[i] * scale) + carry;
650 den[i] = LOWPART (work);
651 carry = HIGHPART (work);
652 if (den[i] != 0) den_hi_sig = i;
653 }
654 }
655
656 num_hi_sig = 4;
657
658 /* Main loop */
659 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
660 {
661 /* Guess the next quotient digit, quo_est, by dividing the first
662 two remaining dividend digits by the high order quotient digit.
663 quo_est is never low and is at most 2 high. */
664 unsigned HOST_WIDE_INT tmp;
665
666 num_hi_sig = i + den_hi_sig + 1;
667 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
668 if (num[num_hi_sig] != den[den_hi_sig])
669 quo_est = work / den[den_hi_sig];
670 else
671 quo_est = BASE - 1;
672
673 /* Refine quo_est so it's usually correct, and at most one high. */
674 tmp = work - quo_est * den[den_hi_sig];
675 if (tmp < BASE
676 && (den[den_hi_sig - 1] * quo_est
677 > (tmp * BASE + num[num_hi_sig - 2])))
678 quo_est--;
679
680 /* Try QUO_EST as the quotient digit, by multiplying the
681 divisor by QUO_EST and subtracting from the remaining dividend.
682 Keep in mind that QUO_EST is the I - 1st digit. */
683
684 carry = 0;
685 for (j = 0; j <= den_hi_sig; j++)
686 {
687 work = quo_est * den[j] + carry;
688 carry = HIGHPART (work);
689 work = num[i + j] - LOWPART (work);
690 num[i + j] = LOWPART (work);
691 carry += HIGHPART (work) != 0;
692 }
693
694 /* If quo_est was high by one, then num[i] went negative and
695 we need to correct things. */
696 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
697 {
698 quo_est--;
699 carry = 0; /* add divisor back in */
700 for (j = 0; j <= den_hi_sig; j++)
701 {
702 work = num[i + j] + den[j] + carry;
703 carry = HIGHPART (work);
704 num[i + j] = LOWPART (work);
705 }
706
707 num [num_hi_sig] += carry;
708 }
709
710 /* Store the quotient digit. */
711 quo[i] = quo_est;
712 }
713 }
714
715 decode (quo, lquo, hquo);
716
717 finish_up:
718 /* If result is negative, make it so. */
719 if (quo_neg)
720 neg_double (*lquo, *hquo, lquo, hquo);
721
722 /* compute trial remainder: rem = num - (quo * den) */
723 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
724 neg_double (*lrem, *hrem, lrem, hrem);
725 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
726
727 switch (code)
728 {
729 case TRUNC_DIV_EXPR:
730 case TRUNC_MOD_EXPR: /* round toward zero */
731 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
732 return overflow;
733
734 case FLOOR_DIV_EXPR:
735 case FLOOR_MOD_EXPR: /* round toward negative infinity */
736 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
737 {
738 /* quo = quo - 1; */
739 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
740 lquo, hquo);
741 }
742 else
743 return overflow;
744 break;
745
746 case CEIL_DIV_EXPR:
747 case CEIL_MOD_EXPR: /* round toward positive infinity */
748 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
749 {
750 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
751 lquo, hquo);
752 }
753 else
754 return overflow;
755 break;
756
757 case ROUND_DIV_EXPR:
758 case ROUND_MOD_EXPR: /* round to closest integer */
759 {
760 unsigned HOST_WIDE_INT labs_rem = *lrem;
761 HOST_WIDE_INT habs_rem = *hrem;
762 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
763 HOST_WIDE_INT habs_den = hden, htwice;
764
765 /* Get absolute values. */
766 if (*hrem < 0)
767 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
768 if (hden < 0)
769 neg_double (lden, hden, &labs_den, &habs_den);
770
771 /* If (2 * abs (lrem) >= abs (lden)) */
772 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
773 labs_rem, habs_rem, &ltwice, &htwice);
774
775 if (((unsigned HOST_WIDE_INT) habs_den
776 < (unsigned HOST_WIDE_INT) htwice)
777 || (((unsigned HOST_WIDE_INT) habs_den
778 == (unsigned HOST_WIDE_INT) htwice)
779 && (labs_den < ltwice)))
780 {
781 if (*hquo < 0)
782 /* quo = quo - 1; */
783 add_double (*lquo, *hquo,
784 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
785 else
786 /* quo = quo + 1; */
787 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
788 lquo, hquo);
789 }
790 else
791 return overflow;
792 }
793 break;
794
795 default:
796 abort ();
797 }
798
799 /* compute true remainder: rem = num - (quo * den) */
800 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
801 neg_double (*lrem, *hrem, lrem, hrem);
802 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
803 return overflow;
804 }
805 \f
806 /* Determine whether an expression T can be cheaply negated using
807 the function negate_expr. */
808
809 static bool
810 negate_expr_p (tree t)
811 {
812 unsigned HOST_WIDE_INT val;
813 unsigned int prec;
814 tree type;
815
816 if (t == 0)
817 return false;
818
819 type = TREE_TYPE (t);
820
821 STRIP_SIGN_NOPS (t);
822 switch (TREE_CODE (t))
823 {
824 case INTEGER_CST:
825 if (TREE_UNSIGNED (type))
826 return false;
827
828 /* Check that -CST will not overflow type. */
829 prec = TYPE_PRECISION (type);
830 if (prec > HOST_BITS_PER_WIDE_INT)
831 {
832 if (TREE_INT_CST_LOW (t) != 0)
833 return true;
834 prec -= HOST_BITS_PER_WIDE_INT;
835 val = TREE_INT_CST_HIGH (t);
836 }
837 else
838 val = TREE_INT_CST_LOW (t);
839 if (prec < HOST_BITS_PER_WIDE_INT)
840 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
841 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
842
843 case REAL_CST:
844 case NEGATE_EXPR:
845 return true;
846
847 case MINUS_EXPR:
848 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
849 return ! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations;
850
851 case MULT_EXPR:
852 if (TREE_UNSIGNED (TREE_TYPE (t)))
853 break;
854
855 /* Fall through. */
856
857 case RDIV_EXPR:
858 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
859 return negate_expr_p (TREE_OPERAND (t, 1))
860 || negate_expr_p (TREE_OPERAND (t, 0));
861 break;
862
863 default:
864 break;
865 }
866 return false;
867 }
868
869 /* Given T, an expression, return the negation of T. Allow for T to be
870 null, in which case return null. */
871
872 static tree
873 negate_expr (tree t)
874 {
875 tree type;
876 tree tem;
877
878 if (t == 0)
879 return 0;
880
881 type = TREE_TYPE (t);
882 STRIP_SIGN_NOPS (t);
883
884 switch (TREE_CODE (t))
885 {
886 case INTEGER_CST:
887 if (! TREE_UNSIGNED (type)
888 && 0 != (tem = fold (build1 (NEGATE_EXPR, type, t)))
889 && ! TREE_OVERFLOW (tem))
890 return tem;
891 break;
892
893 case REAL_CST:
894 tem = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (t)));
895 /* Two's complement FP formats, such as c4x, may overflow. */
896 if (! TREE_OVERFLOW (tem))
897 return convert (type, tem);
898 break;
899
900 case NEGATE_EXPR:
901 return convert (type, TREE_OPERAND (t, 0));
902
903 case MINUS_EXPR:
904 /* - (A - B) -> B - A */
905 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
906 return convert (type,
907 fold (build (MINUS_EXPR, TREE_TYPE (t),
908 TREE_OPERAND (t, 1),
909 TREE_OPERAND (t, 0))));
910 break;
911
912 case MULT_EXPR:
913 if (TREE_UNSIGNED (TREE_TYPE (t)))
914 break;
915
916 /* Fall through. */
917
918 case RDIV_EXPR:
919 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
920 {
921 tem = TREE_OPERAND (t, 1);
922 if (negate_expr_p (tem))
923 return convert (type,
924 fold (build (TREE_CODE (t), TREE_TYPE (t),
925 TREE_OPERAND (t, 0),
926 negate_expr (tem))));
927 tem = TREE_OPERAND (t, 0);
928 if (negate_expr_p (tem))
929 return convert (type,
930 fold (build (TREE_CODE (t), TREE_TYPE (t),
931 negate_expr (tem),
932 TREE_OPERAND (t, 1))));
933 }
934 break;
935
936 default:
937 break;
938 }
939
940 return convert (type, fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t)));
941 }
942 \f
943 /* Split a tree IN into a constant, literal and variable parts that could be
944 combined with CODE to make IN. "constant" means an expression with
945 TREE_CONSTANT but that isn't an actual constant. CODE must be a
946 commutative arithmetic operation. Store the constant part into *CONP,
947 the literal in *LITP and return the variable part. If a part isn't
948 present, set it to null. If the tree does not decompose in this way,
949 return the entire tree as the variable part and the other parts as null.
950
951 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
952 case, we negate an operand that was subtracted. Except if it is a
953 literal for which we use *MINUS_LITP instead.
954
955 If NEGATE_P is true, we are negating all of IN, again except a literal
956 for which we use *MINUS_LITP instead.
957
958 If IN is itself a literal or constant, return it as appropriate.
959
960 Note that we do not guarantee that any of the three values will be the
961 same type as IN, but they will have the same signedness and mode. */
962
963 static tree
964 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
965 tree *minus_litp, int negate_p)
966 {
967 tree var = 0;
968
969 *conp = 0;
970 *litp = 0;
971 *minus_litp = 0;
972
973 /* Strip any conversions that don't change the machine mode or signedness. */
974 STRIP_SIGN_NOPS (in);
975
976 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
977 *litp = in;
978 else if (TREE_CODE (in) == code
979 || (! FLOAT_TYPE_P (TREE_TYPE (in))
980 /* We can associate addition and subtraction together (even
981 though the C standard doesn't say so) for integers because
982 the value is not affected. For reals, the value might be
983 affected, so we can't. */
984 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
985 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
986 {
987 tree op0 = TREE_OPERAND (in, 0);
988 tree op1 = TREE_OPERAND (in, 1);
989 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
990 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
991
992 /* First see if either of the operands is a literal, then a constant. */
993 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
994 *litp = op0, op0 = 0;
995 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
996 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
997
998 if (op0 != 0 && TREE_CONSTANT (op0))
999 *conp = op0, op0 = 0;
1000 else if (op1 != 0 && TREE_CONSTANT (op1))
1001 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1002
1003 /* If we haven't dealt with either operand, this is not a case we can
1004 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1005 if (op0 != 0 && op1 != 0)
1006 var = in;
1007 else if (op0 != 0)
1008 var = op0;
1009 else
1010 var = op1, neg_var_p = neg1_p;
1011
1012 /* Now do any needed negations. */
1013 if (neg_litp_p)
1014 *minus_litp = *litp, *litp = 0;
1015 if (neg_conp_p)
1016 *conp = negate_expr (*conp);
1017 if (neg_var_p)
1018 var = negate_expr (var);
1019 }
1020 else if (TREE_CONSTANT (in))
1021 *conp = in;
1022 else
1023 var = in;
1024
1025 if (negate_p)
1026 {
1027 if (*litp)
1028 *minus_litp = *litp, *litp = 0;
1029 else if (*minus_litp)
1030 *litp = *minus_litp, *minus_litp = 0;
1031 *conp = negate_expr (*conp);
1032 var = negate_expr (var);
1033 }
1034
1035 return var;
1036 }
1037
1038 /* Re-associate trees split by the above function. T1 and T2 are either
1039 expressions to associate or null. Return the new expression, if any. If
1040 we build an operation, do it in TYPE and with CODE. */
1041
1042 static tree
1043 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1044 {
1045 if (t1 == 0)
1046 return t2;
1047 else if (t2 == 0)
1048 return t1;
1049
1050 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1051 try to fold this since we will have infinite recursion. But do
1052 deal with any NEGATE_EXPRs. */
1053 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1054 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1055 {
1056 if (code == PLUS_EXPR)
1057 {
1058 if (TREE_CODE (t1) == NEGATE_EXPR)
1059 return build (MINUS_EXPR, type, convert (type, t2),
1060 convert (type, TREE_OPERAND (t1, 0)));
1061 else if (TREE_CODE (t2) == NEGATE_EXPR)
1062 return build (MINUS_EXPR, type, convert (type, t1),
1063 convert (type, TREE_OPERAND (t2, 0)));
1064 }
1065 return build (code, type, convert (type, t1), convert (type, t2));
1066 }
1067
1068 return fold (build (code, type, convert (type, t1), convert (type, t2)));
1069 }
1070 \f
1071 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1072 to produce a new constant.
1073
1074 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1075
1076 static tree
1077 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1078 {
1079 unsigned HOST_WIDE_INT int1l, int2l;
1080 HOST_WIDE_INT int1h, int2h;
1081 unsigned HOST_WIDE_INT low;
1082 HOST_WIDE_INT hi;
1083 unsigned HOST_WIDE_INT garbagel;
1084 HOST_WIDE_INT garbageh;
1085 tree t;
1086 tree type = TREE_TYPE (arg1);
1087 int uns = TREE_UNSIGNED (type);
1088 int is_sizetype
1089 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1090 int overflow = 0;
1091 int no_overflow = 0;
1092
1093 int1l = TREE_INT_CST_LOW (arg1);
1094 int1h = TREE_INT_CST_HIGH (arg1);
1095 int2l = TREE_INT_CST_LOW (arg2);
1096 int2h = TREE_INT_CST_HIGH (arg2);
1097
1098 switch (code)
1099 {
1100 case BIT_IOR_EXPR:
1101 low = int1l | int2l, hi = int1h | int2h;
1102 break;
1103
1104 case BIT_XOR_EXPR:
1105 low = int1l ^ int2l, hi = int1h ^ int2h;
1106 break;
1107
1108 case BIT_AND_EXPR:
1109 low = int1l & int2l, hi = int1h & int2h;
1110 break;
1111
1112 case RSHIFT_EXPR:
1113 int2l = -int2l;
1114 case LSHIFT_EXPR:
1115 /* It's unclear from the C standard whether shifts can overflow.
1116 The following code ignores overflow; perhaps a C standard
1117 interpretation ruling is needed. */
1118 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1119 &low, &hi, !uns);
1120 no_overflow = 1;
1121 break;
1122
1123 case RROTATE_EXPR:
1124 int2l = - int2l;
1125 case LROTATE_EXPR:
1126 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1127 &low, &hi);
1128 break;
1129
1130 case PLUS_EXPR:
1131 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1132 break;
1133
1134 case MINUS_EXPR:
1135 neg_double (int2l, int2h, &low, &hi);
1136 add_double (int1l, int1h, low, hi, &low, &hi);
1137 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1138 break;
1139
1140 case MULT_EXPR:
1141 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1142 break;
1143
1144 case TRUNC_DIV_EXPR:
1145 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1146 case EXACT_DIV_EXPR:
1147 /* This is a shortcut for a common special case. */
1148 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1149 && ! TREE_CONSTANT_OVERFLOW (arg1)
1150 && ! TREE_CONSTANT_OVERFLOW (arg2)
1151 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1152 {
1153 if (code == CEIL_DIV_EXPR)
1154 int1l += int2l - 1;
1155
1156 low = int1l / int2l, hi = 0;
1157 break;
1158 }
1159
1160 /* ... fall through ... */
1161
1162 case ROUND_DIV_EXPR:
1163 if (int2h == 0 && int2l == 1)
1164 {
1165 low = int1l, hi = int1h;
1166 break;
1167 }
1168 if (int1l == int2l && int1h == int2h
1169 && ! (int1l == 0 && int1h == 0))
1170 {
1171 low = 1, hi = 0;
1172 break;
1173 }
1174 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1175 &low, &hi, &garbagel, &garbageh);
1176 break;
1177
1178 case TRUNC_MOD_EXPR:
1179 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1180 /* This is a shortcut for a common special case. */
1181 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1182 && ! TREE_CONSTANT_OVERFLOW (arg1)
1183 && ! TREE_CONSTANT_OVERFLOW (arg2)
1184 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1185 {
1186 if (code == CEIL_MOD_EXPR)
1187 int1l += int2l - 1;
1188 low = int1l % int2l, hi = 0;
1189 break;
1190 }
1191
1192 /* ... fall through ... */
1193
1194 case ROUND_MOD_EXPR:
1195 overflow = div_and_round_double (code, uns,
1196 int1l, int1h, int2l, int2h,
1197 &garbagel, &garbageh, &low, &hi);
1198 break;
1199
1200 case MIN_EXPR:
1201 case MAX_EXPR:
1202 if (uns)
1203 low = (((unsigned HOST_WIDE_INT) int1h
1204 < (unsigned HOST_WIDE_INT) int2h)
1205 || (((unsigned HOST_WIDE_INT) int1h
1206 == (unsigned HOST_WIDE_INT) int2h)
1207 && int1l < int2l));
1208 else
1209 low = (int1h < int2h
1210 || (int1h == int2h && int1l < int2l));
1211
1212 if (low == (code == MIN_EXPR))
1213 low = int1l, hi = int1h;
1214 else
1215 low = int2l, hi = int2h;
1216 break;
1217
1218 default:
1219 abort ();
1220 }
1221
1222 /* If this is for a sizetype, can be represented as one (signed)
1223 HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
1224 constants. */
1225 if (is_sizetype
1226 && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
1227 || (hi == -1 && (HOST_WIDE_INT) low < 0))
1228 && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
1229 return size_int_type_wide (low, type);
1230 else
1231 {
1232 t = build_int_2 (low, hi);
1233 TREE_TYPE (t) = TREE_TYPE (arg1);
1234 }
1235
1236 TREE_OVERFLOW (t)
1237 = ((notrunc
1238 ? (!uns || is_sizetype) && overflow
1239 : (force_fit_type (t, (!uns || is_sizetype) && overflow)
1240 && ! no_overflow))
1241 | TREE_OVERFLOW (arg1)
1242 | TREE_OVERFLOW (arg2));
1243
1244 /* If we're doing a size calculation, unsigned arithmetic does overflow.
1245 So check if force_fit_type truncated the value. */
1246 if (is_sizetype
1247 && ! TREE_OVERFLOW (t)
1248 && (TREE_INT_CST_HIGH (t) != hi
1249 || TREE_INT_CST_LOW (t) != low))
1250 TREE_OVERFLOW (t) = 1;
1251
1252 TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
1253 | TREE_CONSTANT_OVERFLOW (arg1)
1254 | TREE_CONSTANT_OVERFLOW (arg2));
1255 return t;
1256 }
1257
1258 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1259 constant. We assume ARG1 and ARG2 have the same data type, or at least
1260 are the same kind of constant and the same machine mode.
1261
1262 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1263
1264 static tree
1265 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1266 {
1267 STRIP_NOPS (arg1);
1268 STRIP_NOPS (arg2);
1269
1270 if (TREE_CODE (arg1) == INTEGER_CST)
1271 return int_const_binop (code, arg1, arg2, notrunc);
1272
1273 if (TREE_CODE (arg1) == REAL_CST)
1274 {
1275 enum machine_mode mode;
1276 REAL_VALUE_TYPE d1;
1277 REAL_VALUE_TYPE d2;
1278 REAL_VALUE_TYPE value;
1279 tree t, type;
1280
1281 d1 = TREE_REAL_CST (arg1);
1282 d2 = TREE_REAL_CST (arg2);
1283
1284 type = TREE_TYPE (arg1);
1285 mode = TYPE_MODE (type);
1286
1287 /* Don't perform operation if we honor signaling NaNs and
1288 either operand is a NaN. */
1289 if (HONOR_SNANS (mode)
1290 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1291 return NULL_TREE;
1292
1293 /* Don't perform operation if it would raise a division
1294 by zero exception. */
1295 if (code == RDIV_EXPR
1296 && REAL_VALUES_EQUAL (d2, dconst0)
1297 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1298 return NULL_TREE;
1299
1300 /* If either operand is a NaN, just return it. Otherwise, set up
1301 for floating-point trap; we return an overflow. */
1302 if (REAL_VALUE_ISNAN (d1))
1303 return arg1;
1304 else if (REAL_VALUE_ISNAN (d2))
1305 return arg2;
1306
1307 REAL_ARITHMETIC (value, code, d1, d2);
1308
1309 t = build_real (type, real_value_truncate (mode, value));
1310
1311 TREE_OVERFLOW (t)
1312 = (force_fit_type (t, 0)
1313 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1314 TREE_CONSTANT_OVERFLOW (t)
1315 = TREE_OVERFLOW (t)
1316 | TREE_CONSTANT_OVERFLOW (arg1)
1317 | TREE_CONSTANT_OVERFLOW (arg2);
1318 return t;
1319 }
1320 if (TREE_CODE (arg1) == COMPLEX_CST)
1321 {
1322 tree type = TREE_TYPE (arg1);
1323 tree r1 = TREE_REALPART (arg1);
1324 tree i1 = TREE_IMAGPART (arg1);
1325 tree r2 = TREE_REALPART (arg2);
1326 tree i2 = TREE_IMAGPART (arg2);
1327 tree t;
1328
1329 switch (code)
1330 {
1331 case PLUS_EXPR:
1332 t = build_complex (type,
1333 const_binop (PLUS_EXPR, r1, r2, notrunc),
1334 const_binop (PLUS_EXPR, i1, i2, notrunc));
1335 break;
1336
1337 case MINUS_EXPR:
1338 t = build_complex (type,
1339 const_binop (MINUS_EXPR, r1, r2, notrunc),
1340 const_binop (MINUS_EXPR, i1, i2, notrunc));
1341 break;
1342
1343 case MULT_EXPR:
1344 t = build_complex (type,
1345 const_binop (MINUS_EXPR,
1346 const_binop (MULT_EXPR,
1347 r1, r2, notrunc),
1348 const_binop (MULT_EXPR,
1349 i1, i2, notrunc),
1350 notrunc),
1351 const_binop (PLUS_EXPR,
1352 const_binop (MULT_EXPR,
1353 r1, i2, notrunc),
1354 const_binop (MULT_EXPR,
1355 i1, r2, notrunc),
1356 notrunc));
1357 break;
1358
1359 case RDIV_EXPR:
1360 {
1361 tree magsquared
1362 = const_binop (PLUS_EXPR,
1363 const_binop (MULT_EXPR, r2, r2, notrunc),
1364 const_binop (MULT_EXPR, i2, i2, notrunc),
1365 notrunc);
1366
1367 t = build_complex (type,
1368 const_binop
1369 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1370 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1371 const_binop (PLUS_EXPR,
1372 const_binop (MULT_EXPR, r1, r2,
1373 notrunc),
1374 const_binop (MULT_EXPR, i1, i2,
1375 notrunc),
1376 notrunc),
1377 magsquared, notrunc),
1378 const_binop
1379 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1380 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1381 const_binop (MINUS_EXPR,
1382 const_binop (MULT_EXPR, i1, r2,
1383 notrunc),
1384 const_binop (MULT_EXPR, r1, i2,
1385 notrunc),
1386 notrunc),
1387 magsquared, notrunc));
1388 }
1389 break;
1390
1391 default:
1392 abort ();
1393 }
1394 return t;
1395 }
1396 return 0;
1397 }
1398
1399 /* These are the hash table functions for the hash table of INTEGER_CST
1400 nodes of a sizetype. */
1401
1402 /* Return the hash code code X, an INTEGER_CST. */
1403
1404 static hashval_t
1405 size_htab_hash (const void *x)
1406 {
1407 tree t = (tree) x;
1408
1409 return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
1410 ^ htab_hash_pointer (TREE_TYPE (t))
1411 ^ (TREE_OVERFLOW (t) << 20));
1412 }
1413
1414 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1415 is the same as that given by *Y, which is the same. */
1416
1417 static int
1418 size_htab_eq (const void *x, const void *y)
1419 {
1420 tree xt = (tree) x;
1421 tree yt = (tree) y;
1422
1423 return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
1424 && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
1425 && TREE_TYPE (xt) == TREE_TYPE (yt)
1426 && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
1427 }
1428 \f
1429 /* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
1430 bits are given by NUMBER and of the sizetype represented by KIND. */
1431
1432 tree
1433 size_int_wide (HOST_WIDE_INT number, enum size_type_kind kind)
1434 {
1435 return size_int_type_wide (number, sizetype_tab[(int) kind]);
1436 }
1437
1438 /* Likewise, but the desired type is specified explicitly. */
1439
1440 static GTY (()) tree new_const;
1441 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
1442 htab_t size_htab;
1443
1444 tree
1445 size_int_type_wide (HOST_WIDE_INT number, tree type)
1446 {
1447 void **slot;
1448
1449 if (size_htab == 0)
1450 {
1451 size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
1452 new_const = make_node (INTEGER_CST);
1453 }
1454
1455 /* Adjust NEW_CONST to be the constant we want. If it's already in the
1456 hash table, we return the value from the hash table. Otherwise, we
1457 place that in the hash table and make a new node for the next time. */
1458 TREE_INT_CST_LOW (new_const) = number;
1459 TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
1460 TREE_TYPE (new_const) = type;
1461 TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
1462 = force_fit_type (new_const, 0);
1463
1464 slot = htab_find_slot (size_htab, new_const, INSERT);
1465 if (*slot == 0)
1466 {
1467 tree t = new_const;
1468
1469 *slot = new_const;
1470 new_const = make_node (INTEGER_CST);
1471 return t;
1472 }
1473 else
1474 return (tree) *slot;
1475 }
1476
1477 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1478 is a tree code. The type of the result is taken from the operands.
1479 Both must be the same type integer type and it must be a size type.
1480 If the operands are constant, so is the result. */
1481
1482 tree
1483 size_binop (enum tree_code code, tree arg0, tree arg1)
1484 {
1485 tree type = TREE_TYPE (arg0);
1486
1487 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1488 || type != TREE_TYPE (arg1))
1489 abort ();
1490
1491 /* Handle the special case of two integer constants faster. */
1492 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1493 {
1494 /* And some specific cases even faster than that. */
1495 if (code == PLUS_EXPR && integer_zerop (arg0))
1496 return arg1;
1497 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1498 && integer_zerop (arg1))
1499 return arg0;
1500 else if (code == MULT_EXPR && integer_onep (arg0))
1501 return arg1;
1502
1503 /* Handle general case of two integer constants. */
1504 return int_const_binop (code, arg0, arg1, 0);
1505 }
1506
1507 if (arg0 == error_mark_node || arg1 == error_mark_node)
1508 return error_mark_node;
1509
1510 return fold (build (code, type, arg0, arg1));
1511 }
1512
1513 /* Given two values, either both of sizetype or both of bitsizetype,
1514 compute the difference between the two values. Return the value
1515 in signed type corresponding to the type of the operands. */
1516
1517 tree
1518 size_diffop (tree arg0, tree arg1)
1519 {
1520 tree type = TREE_TYPE (arg0);
1521 tree ctype;
1522
1523 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1524 || type != TREE_TYPE (arg1))
1525 abort ();
1526
1527 /* If the type is already signed, just do the simple thing. */
1528 if (! TREE_UNSIGNED (type))
1529 return size_binop (MINUS_EXPR, arg0, arg1);
1530
1531 ctype = (type == bitsizetype || type == ubitsizetype
1532 ? sbitsizetype : ssizetype);
1533
1534 /* If either operand is not a constant, do the conversions to the signed
1535 type and subtract. The hardware will do the right thing with any
1536 overflow in the subtraction. */
1537 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1538 return size_binop (MINUS_EXPR, convert (ctype, arg0),
1539 convert (ctype, arg1));
1540
1541 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1542 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1543 overflow) and negate (which can't either). Special-case a result
1544 of zero while we're here. */
1545 if (tree_int_cst_equal (arg0, arg1))
1546 return convert (ctype, integer_zero_node);
1547 else if (tree_int_cst_lt (arg1, arg0))
1548 return convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1549 else
1550 return size_binop (MINUS_EXPR, convert (ctype, integer_zero_node),
1551 convert (ctype, size_binop (MINUS_EXPR, arg1, arg0)));
1552 }
1553 \f
1554
1555 /* Given T, a tree representing type conversion of ARG1, a constant,
1556 return a constant tree representing the result of conversion. */
1557
1558 static tree
1559 fold_convert (tree t, tree arg1)
1560 {
1561 tree type = TREE_TYPE (t);
1562 int overflow = 0;
1563
1564 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1565 {
1566 if (TREE_CODE (arg1) == INTEGER_CST)
1567 {
1568 /* If we would build a constant wider than GCC supports,
1569 leave the conversion unfolded. */
1570 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
1571 return t;
1572
1573 /* If we are trying to make a sizetype for a small integer, use
1574 size_int to pick up cached types to reduce duplicate nodes. */
1575 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1576 && !TREE_CONSTANT_OVERFLOW (arg1)
1577 && compare_tree_int (arg1, 10000) < 0)
1578 return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);
1579
1580 /* Given an integer constant, make new constant with new type,
1581 appropriately sign-extended or truncated. */
1582 t = build_int_2 (TREE_INT_CST_LOW (arg1),
1583 TREE_INT_CST_HIGH (arg1));
1584 TREE_TYPE (t) = type;
1585 /* Indicate an overflow if (1) ARG1 already overflowed,
1586 or (2) force_fit_type indicates an overflow.
1587 Tell force_fit_type that an overflow has already occurred
1588 if ARG1 is a too-large unsigned value and T is signed.
1589 But don't indicate an overflow if converting a pointer. */
1590 TREE_OVERFLOW (t)
1591 = ((force_fit_type (t,
1592 (TREE_INT_CST_HIGH (arg1) < 0
1593 && (TREE_UNSIGNED (type)
1594 < TREE_UNSIGNED (TREE_TYPE (arg1)))))
1595 && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
1596 || TREE_OVERFLOW (arg1));
1597 TREE_CONSTANT_OVERFLOW (t)
1598 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1599 }
1600 else if (TREE_CODE (arg1) == REAL_CST)
1601 {
1602 /* The following code implements the floating point to integer
1603 conversion rules required by the Java Language Specification,
1604 that IEEE NaNs are mapped to zero and values that overflow
1605 the target precision saturate, i.e. values greater than
1606 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1607 are mapped to INT_MIN. These semantics are allowed by the
1608 C and C++ standards that simply state that the behavior of
1609 FP-to-integer conversion is unspecified upon overflow. */
1610
1611 HOST_WIDE_INT high, low;
1612
1613 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1614 /* If x is NaN, return zero and show we have an overflow. */
1615 if (REAL_VALUE_ISNAN (x))
1616 {
1617 overflow = 1;
1618 high = 0;
1619 low = 0;
1620 }
1621
1622 /* See if X will be in range after truncation towards 0.
1623 To compensate for truncation, move the bounds away from 0,
1624 but reject if X exactly equals the adjusted bounds. */
1625
1626 if (! overflow)
1627 {
1628 tree lt = TYPE_MIN_VALUE (type);
1629 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1630 REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
1631 if (! REAL_VALUES_LESS (l, x))
1632 {
1633 overflow = 1;
1634 high = TREE_INT_CST_HIGH (lt);
1635 low = TREE_INT_CST_LOW (lt);
1636 }
1637 }
1638
1639 if (! overflow)
1640 {
1641 tree ut = TYPE_MAX_VALUE (type);
1642 if (ut)
1643 {
1644 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1645 REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
1646 if (! REAL_VALUES_LESS (x, u))
1647 {
1648 overflow = 1;
1649 high = TREE_INT_CST_HIGH (ut);
1650 low = TREE_INT_CST_LOW (ut);
1651 }
1652 }
1653 }
1654
1655 if (! overflow)
1656 REAL_VALUE_TO_INT (&low, &high, x);
1657
1658 t = build_int_2 (low, high);
1659 TREE_TYPE (t) = type;
1660 TREE_OVERFLOW (t)
1661 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1662 TREE_CONSTANT_OVERFLOW (t)
1663 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1664 }
1665 TREE_TYPE (t) = type;
1666 }
1667 else if (TREE_CODE (type) == REAL_TYPE)
1668 {
1669 if (TREE_CODE (arg1) == INTEGER_CST)
1670 return build_real_from_int_cst (type, arg1);
1671 if (TREE_CODE (arg1) == REAL_CST)
1672 {
1673 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1674 {
1675 /* We make a copy of ARG1 so that we don't modify an
1676 existing constant tree. */
1677 t = copy_node (arg1);
1678 TREE_TYPE (t) = type;
1679 return t;
1680 }
1681
1682 t = build_real (type,
1683 real_value_truncate (TYPE_MODE (type),
1684 TREE_REAL_CST (arg1)));
1685
1686 TREE_OVERFLOW (t)
1687 = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
1688 TREE_CONSTANT_OVERFLOW (t)
1689 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1690 return t;
1691 }
1692 }
1693 TREE_CONSTANT (t) = 1;
1694 return t;
1695 }
1696 \f
1697 /* Return an expr equal to X but certainly not valid as an lvalue. */
1698
1699 tree
1700 non_lvalue (tree x)
1701 {
1702 tree result;
1703
1704 /* These things are certainly not lvalues. */
1705 if (TREE_CODE (x) == NON_LVALUE_EXPR
1706 || TREE_CODE (x) == INTEGER_CST
1707 || TREE_CODE (x) == REAL_CST
1708 || TREE_CODE (x) == STRING_CST
1709 || TREE_CODE (x) == ADDR_EXPR)
1710 return x;
1711
1712 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
1713 TREE_CONSTANT (result) = TREE_CONSTANT (x);
1714 return result;
1715 }
1716
1717 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
1718 Zero means allow extended lvalues. */
1719
1720 int pedantic_lvalues;
1721
1722 /* When pedantic, return an expr equal to X but certainly not valid as a
1723 pedantic lvalue. Otherwise, return X. */
1724
1725 tree
1726 pedantic_non_lvalue (tree x)
1727 {
1728 if (pedantic_lvalues)
1729 return non_lvalue (x);
1730 else
1731 return x;
1732 }
1733 \f
1734 /* Given a tree comparison code, return the code that is the logical inverse
1735 of the given code. It is not safe to do this for floating-point
1736 comparisons, except for NE_EXPR and EQ_EXPR. */
1737
1738 static enum tree_code
1739 invert_tree_comparison (enum tree_code code)
1740 {
1741 switch (code)
1742 {
1743 case EQ_EXPR:
1744 return NE_EXPR;
1745 case NE_EXPR:
1746 return EQ_EXPR;
1747 case GT_EXPR:
1748 return LE_EXPR;
1749 case GE_EXPR:
1750 return LT_EXPR;
1751 case LT_EXPR:
1752 return GE_EXPR;
1753 case LE_EXPR:
1754 return GT_EXPR;
1755 default:
1756 abort ();
1757 }
1758 }
1759
1760 /* Similar, but return the comparison that results if the operands are
1761 swapped. This is safe for floating-point. */
1762
1763 static enum tree_code
1764 swap_tree_comparison (enum tree_code code)
1765 {
1766 switch (code)
1767 {
1768 case EQ_EXPR:
1769 case NE_EXPR:
1770 return code;
1771 case GT_EXPR:
1772 return LT_EXPR;
1773 case GE_EXPR:
1774 return LE_EXPR;
1775 case LT_EXPR:
1776 return GT_EXPR;
1777 case LE_EXPR:
1778 return GE_EXPR;
1779 default:
1780 abort ();
1781 }
1782 }
1783
1784
1785 /* Convert a comparison tree code from an enum tree_code representation
1786 into a compcode bit-based encoding. This function is the inverse of
1787 compcode_to_comparison. */
1788
1789 static int
1790 comparison_to_compcode (enum tree_code code)
1791 {
1792 switch (code)
1793 {
1794 case LT_EXPR:
1795 return COMPCODE_LT;
1796 case EQ_EXPR:
1797 return COMPCODE_EQ;
1798 case LE_EXPR:
1799 return COMPCODE_LE;
1800 case GT_EXPR:
1801 return COMPCODE_GT;
1802 case NE_EXPR:
1803 return COMPCODE_NE;
1804 case GE_EXPR:
1805 return COMPCODE_GE;
1806 default:
1807 abort ();
1808 }
1809 }
1810
1811 /* Convert a compcode bit-based encoding of a comparison operator back
1812 to GCC's enum tree_code representation. This function is the
1813 inverse of comparison_to_compcode. */
1814
1815 static enum tree_code
1816 compcode_to_comparison (int code)
1817 {
1818 switch (code)
1819 {
1820 case COMPCODE_LT:
1821 return LT_EXPR;
1822 case COMPCODE_EQ:
1823 return EQ_EXPR;
1824 case COMPCODE_LE:
1825 return LE_EXPR;
1826 case COMPCODE_GT:
1827 return GT_EXPR;
1828 case COMPCODE_NE:
1829 return NE_EXPR;
1830 case COMPCODE_GE:
1831 return GE_EXPR;
1832 default:
1833 abort ();
1834 }
1835 }
1836
1837 /* Return nonzero if CODE is a tree code that represents a truth value. */
1838
1839 static int
1840 truth_value_p (enum tree_code code)
1841 {
1842 return (TREE_CODE_CLASS (code) == '<'
1843 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
1844 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
1845 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
1846 }
1847 \f
1848 /* Return nonzero if two operands (typically of the same tree node)
1849 are necessarily equal. If either argument has side-effects this
1850 function returns zero.
1851
1852 If ONLY_CONST is nonzero, only return nonzero for constants.
1853 This function tests whether the operands are indistinguishable;
1854 it does not test whether they are equal using C's == operation.
1855 The distinction is important for IEEE floating point, because
1856 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
1857 (2) two NaNs may be indistinguishable, but NaN!=NaN.
1858
1859 If ONLY_CONST is zero, a VAR_DECL is considered equal to itself
1860 even though it may hold multiple values during a function.
1861 This is because a GCC tree node guarantees that nothing else is
1862 executed between the evaluation of its "operands" (which may often
1863 be evaluated in arbitrary order). Hence if the operands themselves
1864 have no side effects, the VAR_DECLs, PARM_DECLs, etc. must hold the
1865 same value in each operand/subexpression. Hence a zero value for
1866 ONLY_CONST assumes isochronic (or instantaneous) tree equivalence.
1867 If comparing arbitrary expression trees, such as from different
1868 statements, ONLY_CONST must usually be nonzero. */
1869
1870 int
1871 operand_equal_p (tree arg0, tree arg1, int only_const)
1872 {
1873 tree fndecl;
1874
1875 /* If both types don't have the same signedness, then we can't consider
1876 them equal. We must check this before the STRIP_NOPS calls
1877 because they may change the signedness of the arguments. */
1878 if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
1879 return 0;
1880
1881 STRIP_NOPS (arg0);
1882 STRIP_NOPS (arg1);
1883
1884 if (TREE_CODE (arg0) != TREE_CODE (arg1)
1885 /* This is needed for conversions and for COMPONENT_REF.
1886 Might as well play it safe and always test this. */
1887 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
1888 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
1889 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
1890 return 0;
1891
1892 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
1893 We don't care about side effects in that case because the SAVE_EXPR
1894 takes care of that for us. In all other cases, two expressions are
1895 equal if they have no side effects. If we have two identical
1896 expressions with side effects that should be treated the same due
1897 to the only side effects being identical SAVE_EXPR's, that will
1898 be detected in the recursive calls below. */
1899 if (arg0 == arg1 && ! only_const
1900 && (TREE_CODE (arg0) == SAVE_EXPR
1901 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
1902 return 1;
1903
1904 /* Next handle constant cases, those for which we can return 1 even
1905 if ONLY_CONST is set. */
1906 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
1907 switch (TREE_CODE (arg0))
1908 {
1909 case INTEGER_CST:
1910 return (! TREE_CONSTANT_OVERFLOW (arg0)
1911 && ! TREE_CONSTANT_OVERFLOW (arg1)
1912 && tree_int_cst_equal (arg0, arg1));
1913
1914 case REAL_CST:
1915 return (! TREE_CONSTANT_OVERFLOW (arg0)
1916 && ! TREE_CONSTANT_OVERFLOW (arg1)
1917 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
1918 TREE_REAL_CST (arg1)));
1919
1920 case VECTOR_CST:
1921 {
1922 tree v1, v2;
1923
1924 if (TREE_CONSTANT_OVERFLOW (arg0)
1925 || TREE_CONSTANT_OVERFLOW (arg1))
1926 return 0;
1927
1928 v1 = TREE_VECTOR_CST_ELTS (arg0);
1929 v2 = TREE_VECTOR_CST_ELTS (arg1);
1930 while (v1 && v2)
1931 {
1932 if (!operand_equal_p (v1, v2, only_const))
1933 return 0;
1934 v1 = TREE_CHAIN (v1);
1935 v2 = TREE_CHAIN (v2);
1936 }
1937
1938 return 1;
1939 }
1940
1941 case COMPLEX_CST:
1942 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
1943 only_const)
1944 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
1945 only_const));
1946
1947 case STRING_CST:
1948 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
1949 && ! memcmp (TREE_STRING_POINTER (arg0),
1950 TREE_STRING_POINTER (arg1),
1951 TREE_STRING_LENGTH (arg0)));
1952
1953 case ADDR_EXPR:
1954 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
1955 0);
1956 default:
1957 break;
1958 }
1959
1960 if (only_const)
1961 return 0;
1962
1963 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
1964 {
1965 case '1':
1966 /* Two conversions are equal only if signedness and modes match. */
1967 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
1968 && (TREE_UNSIGNED (TREE_TYPE (arg0))
1969 != TREE_UNSIGNED (TREE_TYPE (arg1))))
1970 return 0;
1971
1972 return operand_equal_p (TREE_OPERAND (arg0, 0),
1973 TREE_OPERAND (arg1, 0), 0);
1974
1975 case '<':
1976 case '2':
1977 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
1978 && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
1979 0))
1980 return 1;
1981
1982 /* For commutative ops, allow the other order. */
1983 return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
1984 || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
1985 || TREE_CODE (arg0) == BIT_IOR_EXPR
1986 || TREE_CODE (arg0) == BIT_XOR_EXPR
1987 || TREE_CODE (arg0) == BIT_AND_EXPR
1988 || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
1989 && operand_equal_p (TREE_OPERAND (arg0, 0),
1990 TREE_OPERAND (arg1, 1), 0)
1991 && operand_equal_p (TREE_OPERAND (arg0, 1),
1992 TREE_OPERAND (arg1, 0), 0));
1993
1994 case 'r':
1995 /* If either of the pointer (or reference) expressions we are
1996 dereferencing contain a side effect, these cannot be equal. */
1997 if (TREE_SIDE_EFFECTS (arg0)
1998 || TREE_SIDE_EFFECTS (arg1))
1999 return 0;
2000
2001 switch (TREE_CODE (arg0))
2002 {
2003 case INDIRECT_REF:
2004 return operand_equal_p (TREE_OPERAND (arg0, 0),
2005 TREE_OPERAND (arg1, 0), 0);
2006
2007 case COMPONENT_REF:
2008 case ARRAY_REF:
2009 case ARRAY_RANGE_REF:
2010 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2011 TREE_OPERAND (arg1, 0), 0)
2012 && operand_equal_p (TREE_OPERAND (arg0, 1),
2013 TREE_OPERAND (arg1, 1), 0));
2014
2015 case BIT_FIELD_REF:
2016 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2017 TREE_OPERAND (arg1, 0), 0)
2018 && operand_equal_p (TREE_OPERAND (arg0, 1),
2019 TREE_OPERAND (arg1, 1), 0)
2020 && operand_equal_p (TREE_OPERAND (arg0, 2),
2021 TREE_OPERAND (arg1, 2), 0));
2022 default:
2023 return 0;
2024 }
2025
2026 case 'e':
2027 switch (TREE_CODE (arg0))
2028 {
2029 case ADDR_EXPR:
2030 case TRUTH_NOT_EXPR:
2031 return operand_equal_p (TREE_OPERAND (arg0, 0),
2032 TREE_OPERAND (arg1, 0), 0);
2033
2034 case RTL_EXPR:
2035 return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
2036
2037 case CALL_EXPR:
2038 /* If the CALL_EXPRs call different functions, then they
2039 clearly cannot be equal. */
2040 if (! operand_equal_p (TREE_OPERAND (arg0, 0),
2041 TREE_OPERAND (arg1, 0), 0))
2042 return 0;
2043
2044 /* Only consider const functions equivalent. */
2045 fndecl = get_callee_fndecl (arg0);
2046 if (fndecl == NULL_TREE
2047 || ! (flags_from_decl_or_type (fndecl) & ECF_CONST))
2048 return 0;
2049
2050 /* Now see if all the arguments are the same. operand_equal_p
2051 does not handle TREE_LIST, so we walk the operands here
2052 feeding them to operand_equal_p. */
2053 arg0 = TREE_OPERAND (arg0, 1);
2054 arg1 = TREE_OPERAND (arg1, 1);
2055 while (arg0 && arg1)
2056 {
2057 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1), 0))
2058 return 0;
2059
2060 arg0 = TREE_CHAIN (arg0);
2061 arg1 = TREE_CHAIN (arg1);
2062 }
2063
2064 /* If we get here and both argument lists are exhausted
2065 then the CALL_EXPRs are equal. */
2066 return ! (arg0 || arg1);
2067
2068 default:
2069 return 0;
2070 }
2071
2072 case 'd':
2073 /* Consider __builtin_sqrt equal to sqrt. */
2074 return TREE_CODE (arg0) == FUNCTION_DECL
2075 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2076 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2077 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1);
2078
2079 default:
2080 return 0;
2081 }
2082 }
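
/* Editor's illustrative sketch (not part of fold-const.c): the
   distinction documented above between "indistinguishable" and
   "compares equal with ==".  Under IEEE arithmetic, -0.0 and 0.0
   compare equal yet behave differently, so they must not be folded
   into the same constant.  The function below returns 1.  */

static int
zeros_equal_but_distinguishable (void)
{
  double pz = 0.0, nz = -0.0;
  /* Equal according to ==, ...  */
  int eq = (pz == nz);
  /* ... yet dividing by them yields infinities of opposite sign,
     which is why REAL_VALUES_IDENTICAL is used instead of ==.  */
  int distinct = (1.0 / pz != 1.0 / nz);
  return eq && distinct;
}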
2083 \f
2084 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2085 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2086
2087 When in doubt, return 0. */
2088
2089 static int
2090 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2091 {
2092 int unsignedp1, unsignedpo;
2093 tree primarg0, primarg1, primother;
2094 unsigned int correct_width;
2095
2096 if (operand_equal_p (arg0, arg1, 0))
2097 return 1;
2098
2099 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2100 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2101 return 0;
2102
2103 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2104 and see if the inner values are the same. This removes any
2105 signedness comparison, which doesn't matter here. */
2106 primarg0 = arg0, primarg1 = arg1;
2107 STRIP_NOPS (primarg0);
2108 STRIP_NOPS (primarg1);
2109 if (operand_equal_p (primarg0, primarg1, 0))
2110 return 1;
2111
2112 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2113 actual comparison operand, ARG0.
2114
2115 First throw away any conversions to wider types
2116 already present in the operands. */
2117
2118 primarg1 = get_narrower (arg1, &unsignedp1);
2119 primother = get_narrower (other, &unsignedpo);
2120
2121 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2122 if (unsignedp1 == unsignedpo
2123 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2124 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2125 {
2126 tree type = TREE_TYPE (arg0);
2127
2128 /* Make sure shorter operand is extended the right way
2129 to match the longer operand. */
2130 primarg1 = convert ((*lang_hooks.types.signed_or_unsigned_type)
2131 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2132
2133 if (operand_equal_p (arg0, convert (type, primarg1), 0))
2134 return 1;
2135 }
2136
2137 return 0;
2138 }
2139 \f
2140 /* See if ARG is an expression that is either a comparison or is performing
2141 arithmetic on comparisons. The comparisons must only be comparing
2142 two different values, which will be stored in *CVAL1 and *CVAL2; if
2143 they are nonzero it means that some operands have already been found.
2144 No variables may be used anywhere else in the expression except in the
2145 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2146 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2147
2148 If this is true, return 1. Otherwise, return zero. */
2149
2150 static int
2151 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2152 {
2153 enum tree_code code = TREE_CODE (arg);
2154 char class = TREE_CODE_CLASS (code);
2155
2156 /* We can handle some of the 'e' cases here. */
2157 if (class == 'e' && code == TRUTH_NOT_EXPR)
2158 class = '1';
2159 else if (class == 'e'
2160 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2161 || code == COMPOUND_EXPR))
2162 class = '2';
2163
2164 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
2165 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2166 {
2167 /* If we've already found a CVAL1 or CVAL2, this expression is
2168 too complex to handle. */
2169 if (*cval1 || *cval2)
2170 return 0;
2171
2172 class = '1';
2173 *save_p = 1;
2174 }
2175
2176 switch (class)
2177 {
2178 case '1':
2179 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2180
2181 case '2':
2182 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2183 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2184 cval1, cval2, save_p));
2185
2186 case 'c':
2187 return 1;
2188
2189 case 'e':
2190 if (code == COND_EXPR)
2191 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2192 cval1, cval2, save_p)
2193 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2194 cval1, cval2, save_p)
2195 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2196 cval1, cval2, save_p));
2197 return 0;
2198
2199 case '<':
2200 /* First see if we can handle the first operand, then the second. For
2201 the second operand, we know *CVAL1 can't be zero. It must be that
2202 one side of the comparison is each of the values; test for the
2203 case where this isn't true by failing if the two operands
2204 are the same. */
2205
2206 if (operand_equal_p (TREE_OPERAND (arg, 0),
2207 TREE_OPERAND (arg, 1), 0))
2208 return 0;
2209
2210 if (*cval1 == 0)
2211 *cval1 = TREE_OPERAND (arg, 0);
2212 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2213 ;
2214 else if (*cval2 == 0)
2215 *cval2 = TREE_OPERAND (arg, 0);
2216 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2217 ;
2218 else
2219 return 0;
2220
2221 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2222 ;
2223 else if (*cval2 == 0)
2224 *cval2 = TREE_OPERAND (arg, 1);
2225 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2226 ;
2227 else
2228 return 0;
2229
2230 return 1;
2231
2232 default:
2233 return 0;
2234 }
2235 }
2236 \f
2237 /* ARG is a tree that is known to contain just arithmetic operations and
2238 comparisons. Evaluate the operations in the tree substituting NEW0 for
2239 any occurrence of OLD0 as an operand of a comparison and likewise for
2240 NEW1 and OLD1. */
2241
2242 static tree
2243 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2244 {
2245 tree type = TREE_TYPE (arg);
2246 enum tree_code code = TREE_CODE (arg);
2247 char class = TREE_CODE_CLASS (code);
2248
2249 /* We can handle some of the 'e' cases here. */
2250 if (class == 'e' && code == TRUTH_NOT_EXPR)
2251 class = '1';
2252 else if (class == 'e'
2253 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2254 class = '2';
2255
2256 switch (class)
2257 {
2258 case '1':
2259 return fold (build1 (code, type,
2260 eval_subst (TREE_OPERAND (arg, 0),
2261 old0, new0, old1, new1)));
2262
2263 case '2':
2264 return fold (build (code, type,
2265 eval_subst (TREE_OPERAND (arg, 0),
2266 old0, new0, old1, new1),
2267 eval_subst (TREE_OPERAND (arg, 1),
2268 old0, new0, old1, new1)));
2269
2270 case 'e':
2271 switch (code)
2272 {
2273 case SAVE_EXPR:
2274 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2275
2276 case COMPOUND_EXPR:
2277 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2278
2279 case COND_EXPR:
2280 return fold (build (code, type,
2281 eval_subst (TREE_OPERAND (arg, 0),
2282 old0, new0, old1, new1),
2283 eval_subst (TREE_OPERAND (arg, 1),
2284 old0, new0, old1, new1),
2285 eval_subst (TREE_OPERAND (arg, 2),
2286 old0, new0, old1, new1)));
2287 default:
2288 break;
2289 }
2290 /* Fall through - ??? */
2291
2292 case '<':
2293 {
2294 tree arg0 = TREE_OPERAND (arg, 0);
2295 tree arg1 = TREE_OPERAND (arg, 1);
2296
2297 /* We need to check both for exact equality and tree equality. The
2298 former will be true if the operand has a side-effect. In that
2299 case, we know the operand occurred exactly once. */
2300
2301 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2302 arg0 = new0;
2303 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2304 arg0 = new1;
2305
2306 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2307 arg1 = new0;
2308 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2309 arg1 = new1;
2310
2311 return fold (build (code, type, arg0, arg1));
2312 }
2313
2314 default:
2315 return arg;
2316 }
2317 }
2318 \f
2319 /* Return a tree for the case when the result of an expression is RESULT
2320 converted to TYPE and OMITTED was previously an operand of the expression
2321 but is now not needed (e.g., we folded OMITTED * 0).
2322
2323 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2324 the conversion of RESULT to TYPE. */
2325
2326 tree
2327 omit_one_operand (tree type, tree result, tree omitted)
2328 {
2329 tree t = convert (type, result);
2330
2331 if (TREE_SIDE_EFFECTS (omitted))
2332 return build (COMPOUND_EXPR, type, omitted, t);
2333
2334 return non_lvalue (t);
2335 }
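
/* Editor's illustrative sketch (not part of fold-const.c): what
   omit_one_operand produces at the source level.  Folding something
   like "f () * 0" cannot simply yield 0 when f has side effects; the
   call must still be evaluated, so the fold builds the equivalent of
   a comma expression.  The names below are hypothetical.  */

static int example_counter;

static int
example_f (void)
{
  return ++example_counter;
}

static int
omit_one_operand_example (void)
{
  /* The folded form of "example_f () * 0":  */
  return (example_f (), 0);
}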
2336
2337 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2338
2339 static tree
2340 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2341 {
2342 tree t = convert (type, result);
2343
2344 if (TREE_SIDE_EFFECTS (omitted))
2345 return build (COMPOUND_EXPR, type, omitted, t);
2346
2347 return pedantic_non_lvalue (t);
2348 }
2349 \f
2350 /* Return a simplified tree node for the truth-negation of ARG. This
2351 never alters ARG itself. We assume that ARG is an operation that
2352 returns a truth value (0 or 1). */
2353
2354 tree
2355 invert_truthvalue (tree arg)
2356 {
2357 tree type = TREE_TYPE (arg);
2358 enum tree_code code = TREE_CODE (arg);
2359
2360 if (code == ERROR_MARK)
2361 return arg;
2362
2363 /* If this is a comparison, we can simply invert it, except for
2364 floating-point non-equality comparisons, in which case we just
2365 enclose a TRUTH_NOT_EXPR around what we have. */
2366
2367 if (TREE_CODE_CLASS (code) == '<')
2368 {
2369 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2370 && !flag_unsafe_math_optimizations
2371 && code != NE_EXPR
2372 && code != EQ_EXPR)
2373 return build1 (TRUTH_NOT_EXPR, type, arg);
2374 else
2375 return build (invert_tree_comparison (code), type,
2376 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2377 }
2378
2379 switch (code)
2380 {
2381 case INTEGER_CST:
2382 return convert (type, build_int_2 (integer_zerop (arg), 0));
2383
2384 case TRUTH_AND_EXPR:
2385 return build (TRUTH_OR_EXPR, type,
2386 invert_truthvalue (TREE_OPERAND (arg, 0)),
2387 invert_truthvalue (TREE_OPERAND (arg, 1)));
2388
2389 case TRUTH_OR_EXPR:
2390 return build (TRUTH_AND_EXPR, type,
2391 invert_truthvalue (TREE_OPERAND (arg, 0)),
2392 invert_truthvalue (TREE_OPERAND (arg, 1)));
2393
2394 case TRUTH_XOR_EXPR:
2395 /* Here we can invert either operand. We invert the first operand
2396 unless the second operand is a TRUTH_NOT_EXPR in which case our
2397 result is the XOR of the first operand with the inside of the
2398 negation of the second operand. */
2399
2400 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2401 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2402 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2403 else
2404 return build (TRUTH_XOR_EXPR, type,
2405 invert_truthvalue (TREE_OPERAND (arg, 0)),
2406 TREE_OPERAND (arg, 1));
2407
2408 case TRUTH_ANDIF_EXPR:
2409 return build (TRUTH_ORIF_EXPR, type,
2410 invert_truthvalue (TREE_OPERAND (arg, 0)),
2411 invert_truthvalue (TREE_OPERAND (arg, 1)));
2412
2413 case TRUTH_ORIF_EXPR:
2414 return build (TRUTH_ANDIF_EXPR, type,
2415 invert_truthvalue (TREE_OPERAND (arg, 0)),
2416 invert_truthvalue (TREE_OPERAND (arg, 1)));
2417
2418 case TRUTH_NOT_EXPR:
2419 return TREE_OPERAND (arg, 0);
2420
2421 case COND_EXPR:
2422 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2423 invert_truthvalue (TREE_OPERAND (arg, 1)),
2424 invert_truthvalue (TREE_OPERAND (arg, 2)));
2425
2426 case COMPOUND_EXPR:
2427 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2428 invert_truthvalue (TREE_OPERAND (arg, 1)));
2429
2430 case WITH_RECORD_EXPR:
2431 return build (WITH_RECORD_EXPR, type,
2432 invert_truthvalue (TREE_OPERAND (arg, 0)),
2433 TREE_OPERAND (arg, 1));
2434
2435 case NON_LVALUE_EXPR:
2436 return invert_truthvalue (TREE_OPERAND (arg, 0));
2437
2438 case NOP_EXPR:
2439 case CONVERT_EXPR:
2440 case FLOAT_EXPR:
2441 return build1 (TREE_CODE (arg), type,
2442 invert_truthvalue (TREE_OPERAND (arg, 0)));
2443
2444 case BIT_AND_EXPR:
2445 if (!integer_onep (TREE_OPERAND (arg, 1)))
2446 break;
2447 return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));
2448
2449 case SAVE_EXPR:
2450 return build1 (TRUTH_NOT_EXPR, type, arg);
2451
2452 case CLEANUP_POINT_EXPR:
2453 return build1 (CLEANUP_POINT_EXPR, type,
2454 invert_truthvalue (TREE_OPERAND (arg, 0)));
2455
2456 default:
2457 break;
2458 }
2459 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2460 abort ();
2461 return build1 (TRUTH_NOT_EXPR, type, arg);
2462 }
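
/* Editor's illustrative sketch (not part of fold-const.c): the
   TRUTH_AND_EXPR and TRUTH_OR_EXPR cases above are De Morgan's laws.
   For truth values, "! (a && b)" equals "! a || ! b", so the function
   below always returns 1.  */

static int
de_morgan_holds (int a, int b)
{
  return (! (a && b)) == (! a || ! b);
}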
2463
2464 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2465 operands are another bit-wise operation with a common input. If so,
2466 distribute the bit operations to save an operation and possibly two if
2467 constants are involved. For example, convert
2468 (A | B) & (A | C) into A | (B & C)
2469 Further simplification will occur if B and C are constants.
2470
2471 If this optimization cannot be done, 0 will be returned. */
2472
2473 static tree
2474 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
2475 {
2476 tree common;
2477 tree left, right;
2478
2479 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2480 || TREE_CODE (arg0) == code
2481 || (TREE_CODE (arg0) != BIT_AND_EXPR
2482 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2483 return 0;
2484
2485 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2486 {
2487 common = TREE_OPERAND (arg0, 0);
2488 left = TREE_OPERAND (arg0, 1);
2489 right = TREE_OPERAND (arg1, 1);
2490 }
2491 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2492 {
2493 common = TREE_OPERAND (arg0, 0);
2494 left = TREE_OPERAND (arg0, 1);
2495 right = TREE_OPERAND (arg1, 0);
2496 }
2497 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2498 {
2499 common = TREE_OPERAND (arg0, 1);
2500 left = TREE_OPERAND (arg0, 0);
2501 right = TREE_OPERAND (arg1, 1);
2502 }
2503 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2504 {
2505 common = TREE_OPERAND (arg0, 1);
2506 left = TREE_OPERAND (arg0, 0);
2507 right = TREE_OPERAND (arg1, 0);
2508 }
2509 else
2510 return 0;
2511
2512 return fold (build (TREE_CODE (arg0), type, common,
2513 fold (build (code, type, left, right))));
2514 }
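
/* Editor's illustrative sketch (not part of fold-const.c): the
   distribution performed above, checked at the value level.  The
   rewrite saves one bitwise operation, and when B and C are constants
   "b & c" folds away entirely.  The function always returns 1.  */

static int
distribution_holds (unsigned int a, unsigned int b, unsigned int c)
{
  return ((a | b) & (a | c)) == (a | (b & c));
}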
2515 \f
2516 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2517 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
2518
2519 static tree
2520 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
2521 int unsignedp)
2522 {
2523 tree result = build (BIT_FIELD_REF, type, inner,
2524 size_int (bitsize), bitsize_int (bitpos));
2525
2526 TREE_UNSIGNED (result) = unsignedp;
2527
2528 return result;
2529 }
2530
2531 /* Optimize a bit-field compare.
2532
2533 There are two cases: First is a compare against a constant and the
2534 second is a comparison of two items where the fields are at the same
2535 bit position relative to the start of a chunk (byte, halfword, word)
2536 large enough to contain it. In these cases we can avoid the shift
2537 implicit in bitfield extractions.
2538
2539 For constants, we emit a compare of the shifted constant with the
2540 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2541 compared. For two fields at the same position, we do the ANDs with the
2542 similar mask and compare the result of the ANDs.
2543
2544 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2545 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2546 are the left and right operands of the comparison, respectively.
2547
2548 If the optimization described above can be done, we return the resulting
2549 tree. Otherwise we return zero. */
2550
2551 static tree
2552 optimize_bit_field_compare (enum tree_code code, tree compare_type,
2553 tree lhs, tree rhs)
2554 {
2555 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
2556 tree type = TREE_TYPE (lhs);
2557 tree signed_type, unsigned_type;
2558 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2559 enum machine_mode lmode, rmode, nmode;
2560 int lunsignedp, runsignedp;
2561 int lvolatilep = 0, rvolatilep = 0;
2562 tree linner, rinner = NULL_TREE;
2563 tree mask;
2564 tree offset;
2565
2566 /* Get all the information about the extractions being done. If the bit size
2567 is the same as the size of the underlying object, we aren't doing an
2568 extraction at all and so can do nothing. We also don't want to
2569 do anything if the inner expression is a PLACEHOLDER_EXPR since we
2570 then will no longer be able to replace it. */
2571 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2572 &lunsignedp, &lvolatilep);
2573 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2574 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
2575 return 0;
2576
2577 if (!const_p)
2578 {
2579 /* If this is not a constant, we can only do something if bit positions,
2580 sizes, and signedness are the same. */
2581 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
2582 &runsignedp, &rvolatilep);
2583
2584 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2585 || lunsignedp != runsignedp || offset != 0
2586 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
2587 return 0;
2588 }
2589
2590 /* See if we can find a mode to refer to this field. We should be able to,
2591 but fail if we can't. */
2592 nmode = get_best_mode (lbitsize, lbitpos,
2593 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
2594 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
2595 TYPE_ALIGN (TREE_TYPE (rinner))),
2596 word_mode, lvolatilep || rvolatilep);
2597 if (nmode == VOIDmode)
2598 return 0;
2599
2600 /* Set signed and unsigned types of the precision of this mode for the
2601 shifts below. */
2602 signed_type = (*lang_hooks.types.type_for_mode) (nmode, 0);
2603 unsigned_type = (*lang_hooks.types.type_for_mode) (nmode, 1);
2604
2605 /* Compute the bit position and size for the new reference and our offset
2606 within it. If the new reference is the same size as the original, we
2607 won't optimize anything, so return zero. */
2608 nbitsize = GET_MODE_BITSIZE (nmode);
2609 nbitpos = lbitpos & ~ (nbitsize - 1);
2610 lbitpos -= nbitpos;
2611 if (nbitsize == lbitsize)
2612 return 0;
2613
2614 if (BYTES_BIG_ENDIAN)
2615 lbitpos = nbitsize - lbitsize - lbitpos;
2616
2617 /* Make the mask to be used against the extracted field. */
2618 mask = build_int_2 (~0, ~0);
2619 TREE_TYPE (mask) = unsigned_type;
2620 force_fit_type (mask, 0);
2621 mask = convert (unsigned_type, mask);
2622 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
2623 mask = const_binop (RSHIFT_EXPR, mask,
2624 size_int (nbitsize - lbitsize - lbitpos), 0);
2625
2626 if (! const_p)
2627 /* If not comparing with constant, just rework the comparison
2628 and return. */
2629 return build (code, compare_type,
2630 build (BIT_AND_EXPR, unsigned_type,
2631 make_bit_field_ref (linner, unsigned_type,
2632 nbitsize, nbitpos, 1),
2633 mask),
2634 build (BIT_AND_EXPR, unsigned_type,
2635 make_bit_field_ref (rinner, unsigned_type,
2636 nbitsize, nbitpos, 1),
2637 mask));
2638
2639 /* Otherwise, we are handling the constant case. See if the constant is too
2640 big for the field. Warn and return a tree for 0 (false) if so. We do
2641 this not only for its own sake, but to avoid having to test for this
2642 error case below. If we didn't, we might generate wrong code.
2643
2644 For unsigned fields, the constant shifted right by the field length should
2645 be all zero. For signed fields, the high-order bits should agree with
2646 the sign bit. */
2647
2648 if (lunsignedp)
2649 {
2650 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2651 convert (unsigned_type, rhs),
2652 size_int (lbitsize), 0)))
2653 {
2654 warning ("comparison is always %d due to width of bit-field",
2655 code == NE_EXPR);
2656 return convert (compare_type,
2657 (code == NE_EXPR
2658 ? integer_one_node : integer_zero_node));
2659 }
2660 }
2661 else
2662 {
2663 tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
2664 size_int (lbitsize - 1), 0);
2665 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2666 {
2667 warning ("comparison is always %d due to width of bit-field",
2668 code == NE_EXPR);
2669 return convert (compare_type,
2670 (code == NE_EXPR
2671 ? integer_one_node : integer_zero_node));
2672 }
2673 }
2674
2675 /* Single-bit compares should always be against zero. */
2676 if (lbitsize == 1 && ! integer_zerop (rhs))
2677 {
2678 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2679 rhs = convert (type, integer_zero_node);
2680 }
2681
2682 /* Make a new bitfield reference, shift the constant over the
2683 appropriate number of bits and mask it with the computed mask
2684 (in case this was a signed field). If we changed it, make a new one. */
2685 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
2686 if (lvolatilep)
2687 {
2688 TREE_SIDE_EFFECTS (lhs) = 1;
2689 TREE_THIS_VOLATILE (lhs) = 1;
2690 }
2691
2692 rhs = fold (const_binop (BIT_AND_EXPR,
2693 const_binop (LSHIFT_EXPR,
2694 convert (unsigned_type, rhs),
2695 size_int (lbitpos), 0),
2696 mask, 0));
2697
2698 return build (code, compare_type,
2699 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
2700 rhs);
2701 }
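
/* Editor's illustrative sketch (not part of fold-const.c): the shape
   of code this optimization produces.  Rather than extracting a
   bit-field (load, shift, mask) and then comparing, it compares the
   containing word ANDed with a mask against the constant shifted into
   the field's position.  The 3-bit field at bit 4 is hypothetical.  */

static int
bit_field_compare_example (unsigned int word)
{
  /* Equivalent of "s.f == 5" for a 3-bit field at bit position 4.  */
  return (word & (7u << 4)) == (5u << 4);
}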
2702 \f
2703 /* Subroutine for fold_truthop: decode a field reference.
2704
2705 If EXP is a comparison reference, we return the innermost reference.
2706
2707 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2708 set to the starting bit number.
2709
2710 If the innermost field can be completely contained in a mode-sized
2711 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
2712
2713 *PVOLATILEP is set to 1 if any expression encountered is volatile;
2714 otherwise it is not changed.
2715
2716 *PUNSIGNEDP is set to the signedness of the field.
2717
2718 *PMASK is set to the mask used. This is either contained in a
2719 BIT_AND_EXPR or derived from the width of the field.
2720
2721 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
2722
2723 Return 0 if this is not a component reference or is one that we can't
2724 do anything with. */
2725
2726 static tree
2727 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
2728 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
2729 int *punsignedp, int *pvolatilep,
2730 tree *pmask, tree *pand_mask)
2731 {
2732 tree outer_type = 0;
2733 tree and_mask = 0;
2734 tree mask, inner, offset;
2735 tree unsigned_type;
2736 unsigned int precision;
2737
2738 /* All the optimizations using this function assume integer fields.
2739 There are problems with FP fields since the type_for_size call
2740 below can fail for, e.g., XFmode. */
2741 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
2742 return 0;
2743
2744 /* We are interested in the bare arrangement of bits, so strip everything
2745 that doesn't affect the machine mode. However, record the type of the
2746 outermost expression if it may matter below. */
2747 if (TREE_CODE (exp) == NOP_EXPR
2748 || TREE_CODE (exp) == CONVERT_EXPR
2749 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2750 outer_type = TREE_TYPE (exp);
2751 STRIP_NOPS (exp);
2752
2753 if (TREE_CODE (exp) == BIT_AND_EXPR)
2754 {
2755 and_mask = TREE_OPERAND (exp, 1);
2756 exp = TREE_OPERAND (exp, 0);
2757 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
2758 if (TREE_CODE (and_mask) != INTEGER_CST)
2759 return 0;
2760 }
2761
2762 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
2763 punsignedp, pvolatilep);
2764 if ((inner == exp && and_mask == 0)
2765 || *pbitsize < 0 || offset != 0
2766 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
2767 return 0;
2768
2769 /* If the number of bits in the reference is the same as the bitsize of
2770 the outer type, then the outer type gives the signedness. Otherwise
2771 (in case of a small bitfield) the signedness is unchanged. */
2772 if (outer_type && *pbitsize == tree_low_cst (TYPE_SIZE (outer_type), 1))
2773 *punsignedp = TREE_UNSIGNED (outer_type);
2774
2775 /* Compute the mask to access the bitfield. */
2776 unsigned_type = (*lang_hooks.types.type_for_size) (*pbitsize, 1);
2777 precision = TYPE_PRECISION (unsigned_type);
2778
2779 mask = build_int_2 (~0, ~0);
2780 TREE_TYPE (mask) = unsigned_type;
2781 force_fit_type (mask, 0);
2782 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2783 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2784
2785 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
2786 if (and_mask != 0)
2787 mask = fold (build (BIT_AND_EXPR, unsigned_type,
2788 convert (unsigned_type, and_mask), mask));
2789
2790 *pmask = mask;
2791 *pand_mask = and_mask;
2792 return inner;
2793 }
2794
2795 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
2796 bit positions. */
2797
2798 static int
2799 all_ones_mask_p (tree mask, int size)
2800 {
2801 tree type = TREE_TYPE (mask);
2802 unsigned int precision = TYPE_PRECISION (type);
2803 tree tmask;
2804
2805 tmask = build_int_2 (~0, ~0);
2806 TREE_TYPE (tmask) = (*lang_hooks.types.signed_type) (type);
2807 force_fit_type (tmask, 0);
2808 return
2809 tree_int_cst_equal (mask,
2810 const_binop (RSHIFT_EXPR,
2811 const_binop (LSHIFT_EXPR, tmask,
2812 size_int (precision - size),
2813 0),
2814 size_int (precision - size), 0));
2815 }
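
/* Editor's illustrative sketch (not part of fold-const.c): the mask
   shape all_ones_mask_p recognizes, built the same way the function
   builds TMASK: shift an all-ones value left and then back right, so
   exactly SIZE low-order one bits remain.  Assumes a 32-bit unsigned
   int and 0 < size <= 32.  */

static unsigned int
low_order_ones_mask (int size)
{
  unsigned int tmask = ~0u;
  return (tmask << (32 - size)) >> (32 - size);
}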
2816
2817 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
2818 represents the sign bit of EXP's type. If EXP represents a sign
2819 or zero extension, also test VAL against the unextended type.
2820 The return value is the (sub)expression whose sign bit is VAL,
2821 or NULL_TREE otherwise. */
2822
2823 static tree
2824 sign_bit_p (tree exp, tree val)
2825 {
2826 unsigned HOST_WIDE_INT mask_lo, lo;
2827 HOST_WIDE_INT mask_hi, hi;
2828 int width;
2829 tree t;
2830
2831 /* Tree EXP must have an integral type. */
2832 t = TREE_TYPE (exp);
2833 if (! INTEGRAL_TYPE_P (t))
2834 return NULL_TREE;
2835
2836 /* Tree VAL must be an integer constant. */
2837 if (TREE_CODE (val) != INTEGER_CST
2838 || TREE_CONSTANT_OVERFLOW (val))
2839 return NULL_TREE;
2840
2841 width = TYPE_PRECISION (t);
2842 if (width > HOST_BITS_PER_WIDE_INT)
2843 {
2844 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
2845 lo = 0;
2846
2847 mask_hi = ((unsigned HOST_WIDE_INT) -1
2848 >> (2 * HOST_BITS_PER_WIDE_INT - width));
2849 mask_lo = -1;
2850 }
2851 else
2852 {
2853 hi = 0;
2854 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
2855
2856 mask_hi = 0;
2857 mask_lo = ((unsigned HOST_WIDE_INT) -1
2858 >> (HOST_BITS_PER_WIDE_INT - width));
2859 }
2860
2861 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
2862 treat VAL as if it were unsigned. */
2863 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
2864 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
2865 return exp;
2866
2867 /* Handle extension from a narrower type. */
2868 if (TREE_CODE (exp) == NOP_EXPR
2869 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
2870 return sign_bit_p (TREE_OPERAND (exp, 0), val);
2871
2872 return NULL_TREE;
2873 }
2874
2875 /* Subroutine for fold_truthop: determine if an operand is simple enough
2876 to be evaluated unconditionally. */
2877
2878 static int
2879 simple_operand_p (tree exp)
2880 {
2881 /* Strip any conversions that don't change the machine mode. */
2882 while ((TREE_CODE (exp) == NOP_EXPR
2883 || TREE_CODE (exp) == CONVERT_EXPR)
2884 && (TYPE_MODE (TREE_TYPE (exp))
2885 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
2886 exp = TREE_OPERAND (exp, 0);
2887
2888 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
2889 || (DECL_P (exp)
2890 && ! TREE_ADDRESSABLE (exp)
2891 && ! TREE_THIS_VOLATILE (exp)
2892 && ! DECL_NONLOCAL (exp)
2893 /* Don't regard global variables as simple. They may be
2894 allocated in ways unknown to the compiler (shared memory,
2895 #pragma weak, etc). */
2896 && ! TREE_PUBLIC (exp)
2897 && ! DECL_EXTERNAL (exp)
2898 /* Loading a static variable is unduly expensive, but global
2899 registers aren't expensive. */
2900 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
2901 }
2902 \f
2903 /* The following functions are subroutines to fold_range_test and allow it to
2904 try to change a logical combination of comparisons into a range test.
2905
2906 For example, both
2907 X == 2 || X == 3 || X == 4 || X == 5
2908 and
2909 X >= 2 && X <= 5
2910 are converted to
2911 (unsigned) (X - 2) <= 3
2912
2913 We describe each set of comparisons as being either inside or outside
2914 a range, using a variable named like IN_P, and then describe the
2915 range with a lower and upper bound. If one of the bounds is omitted,
2916 it represents either the highest or lowest value of the type.
2917
2918 In the comments below, we represent a range by two numbers in brackets
2919 preceded by a "+" to designate being inside that range, or a "-" to
2920 designate being outside that range, so the condition can be inverted by
2921 flipping the prefix. An omitted bound is represented by a "-". For
2922 example, "- [-, 10]" means being outside the range starting at the lowest
2923 possible value and ending at 10, in other words, being greater than 10.
2924 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
2925 always false.
2926
2927 We set up things so that the missing bounds are handled in a consistent
2928 manner so neither a missing bound nor "true" and "false" need to be
2929 handled using a special case. */
2930
2931 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
2932 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
2933 and UPPER1_P are nonzero if the respective argument is an upper bound
2934 and zero for a lower. TYPE, if nonzero, is the type of the result; it
2935 must be specified for a comparison. ARG1 will be converted to ARG0's
2936 type if both are specified. */
2937
2938 static tree
2939 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
2940 tree arg1, int upper1_p)
2941 {
2942 tree tem;
2943 int result;
2944 int sgn0, sgn1;
2945
2946 /* If neither arg represents infinity, do the normal operation.
2947 Else, if not a comparison, return infinity. Else handle the special
2948 comparison rules. Note that most of the cases below won't occur, but
2949 are handled for consistency. */
2950
2951 if (arg0 != 0 && arg1 != 0)
2952 {
2953 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
2954 arg0, convert (TREE_TYPE (arg0), arg1)));
2955 STRIP_NOPS (tem);
2956 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
2957 }
2958
2959 if (TREE_CODE_CLASS (code) != '<')
2960 return 0;
2961
2962 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
2963 for neither. In real maths, we cannot assume open ended ranges are
2964 the same. But, this is computer arithmetic, where numbers are finite.
2965 We can therefore make the transformation of any unbounded range with
2966 the value Z, Z being greater than any representable number. This permits
2967 us to treat unbounded ranges as equal. */
2968 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
2969 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
2970 switch (code)
2971 {
2972 case EQ_EXPR:
2973 result = sgn0 == sgn1;
2974 break;
2975 case NE_EXPR:
2976 result = sgn0 != sgn1;
2977 break;
2978 case LT_EXPR:
2979 result = sgn0 < sgn1;
2980 break;
2981 case LE_EXPR:
2982 result = sgn0 <= sgn1;
2983 break;
2984 case GT_EXPR:
2985 result = sgn0 > sgn1;
2986 break;
2987 case GE_EXPR:
2988 result = sgn0 >= sgn1;
2989 break;
2990 default:
2991 abort ();
2992 }
2993
2994 return convert (type, result ? integer_one_node : integer_zero_node);
2995 }
2996 \f
2997 /* Given EXP, a logical expression, set the range it is testing into
2998 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
2999 actually being tested. *PLOW and *PHIGH will be made of the same type
3000 as the returned expression. If EXP is not a comparison, we will most
3001 likely not be returning a useful value and range. */
3002
3003 static tree
3004 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3005 {
3006 enum tree_code code;
3007 tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
3008 tree orig_type = NULL_TREE;
3009 int in_p, n_in_p;
3010 tree low, high, n_low, n_high;
3011
3012 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3013 and see if we can refine the range. Some of the cases below may not
3014 happen, but it doesn't seem worth worrying about this. We "continue"
3015 the outer loop when we've changed something; otherwise we "break"
3016 the switch, which will "break" the while. */
3017
3018 in_p = 0, low = high = convert (TREE_TYPE (exp), integer_zero_node);
3019
3020 while (1)
3021 {
3022 code = TREE_CODE (exp);
3023
3024 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3025 {
3026 if (first_rtl_op (code) > 0)
3027 arg0 = TREE_OPERAND (exp, 0);
3028 if (TREE_CODE_CLASS (code) == '<'
3029 || TREE_CODE_CLASS (code) == '1'
3030 || TREE_CODE_CLASS (code) == '2')
3031 type = TREE_TYPE (arg0);
3032 if (TREE_CODE_CLASS (code) == '2'
3033 || TREE_CODE_CLASS (code) == '<'
3034 || (TREE_CODE_CLASS (code) == 'e'
3035 && TREE_CODE_LENGTH (code) > 1))
3036 arg1 = TREE_OPERAND (exp, 1);
3037 }
3038
3039 /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
3040 lose a cast by accident. */
3041 if (type != NULL_TREE && orig_type == NULL_TREE)
3042 orig_type = type;
3043
3044 switch (code)
3045 {
3046 case TRUTH_NOT_EXPR:
3047 in_p = ! in_p, exp = arg0;
3048 continue;
3049
3050 case EQ_EXPR: case NE_EXPR:
3051 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3052 /* We can only do something if the range is testing for zero
3053 and if the second operand is an integer constant. Note that
3054 saying something is "in" the range we make is done by
3055 complementing IN_P since it will set in the initial case of
3056 being not equal to zero; "out" is leaving it alone. */
3057 if (low == 0 || high == 0
3058 || ! integer_zerop (low) || ! integer_zerop (high)
3059 || TREE_CODE (arg1) != INTEGER_CST)
3060 break;
3061
3062 switch (code)
3063 {
3064 case NE_EXPR: /* - [c, c] */
3065 low = high = arg1;
3066 break;
3067 case EQ_EXPR: /* + [c, c] */
3068 in_p = ! in_p, low = high = arg1;
3069 break;
3070 case GT_EXPR: /* - [-, c] */
3071 low = 0, high = arg1;
3072 break;
3073 case GE_EXPR: /* + [c, -] */
3074 in_p = ! in_p, low = arg1, high = 0;
3075 break;
3076 case LT_EXPR: /* - [c, -] */
3077 low = arg1, high = 0;
3078 break;
3079 case LE_EXPR: /* + [-, c] */
3080 in_p = ! in_p, low = 0, high = arg1;
3081 break;
3082 default:
3083 abort ();
3084 }
3085
3086 exp = arg0;
3087
3088 /* If this is an unsigned comparison, we also know that EXP is
3089 greater than or equal to zero. We base the range tests we make
3090 on that fact, so we record it here so we can parse existing
3091 range tests. */
3092 if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
3093 {
3094 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
3095 1, convert (type, integer_zero_node),
3096 NULL_TREE))
3097 break;
3098
3099 in_p = n_in_p, low = n_low, high = n_high;
3100
3101 /* If the high bound is missing, but we have a nonzero low
3102 bound, reverse the range so it goes from zero to the low bound
3103 minus 1. */
3104 if (high == 0 && low && ! integer_zerop (low))
3105 {
3106 in_p = ! in_p;
3107 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3108 integer_one_node, 0);
3109 low = convert (type, integer_zero_node);
3110 }
3111 }
3112 continue;
3113
3114 case NEGATE_EXPR:
3115 /* (-x) IN [a,b] -> x in [-b, -a] */
3116 n_low = range_binop (MINUS_EXPR, type,
3117 convert (type, integer_zero_node), 0, high, 1);
3118 n_high = range_binop (MINUS_EXPR, type,
3119 convert (type, integer_zero_node), 0, low, 0);
3120 low = n_low, high = n_high;
3121 exp = arg0;
3122 continue;
3123
3124 case BIT_NOT_EXPR:
3125 /* ~ X -> -X - 1 */
3126 exp = build (MINUS_EXPR, type, negate_expr (arg0),
3127 convert (type, integer_one_node));
3128 continue;
3129
3130 case PLUS_EXPR: case MINUS_EXPR:
3131 if (TREE_CODE (arg1) != INTEGER_CST)
3132 break;
3133
3134 /* If EXP is signed, any overflow in the computation is undefined,
3135 so we don't worry about it so long as our computations on
3136 the bounds don't overflow. For unsigned, overflow is defined
3137 and this is exactly the right thing. */
3138 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3139 type, low, 0, arg1, 0);
3140 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3141 type, high, 1, arg1, 0);
3142 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3143 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3144 break;
3145
3146 /* Check for an unsigned range which has wrapped around the maximum
3147 value thus making n_high < n_low, and normalize it. */
3148 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3149 {
3150 low = range_binop (PLUS_EXPR, type, n_high, 0,
3151 integer_one_node, 0);
3152 high = range_binop (MINUS_EXPR, type, n_low, 0,
3153 integer_one_node, 0);
3154
3155 /* If the range is of the form +/- [ x+1, x ], we won't
3156 be able to normalize it. But then, it represents the
3157 whole range or the empty set, so make it
3158 +/- [ -, - ]. */
3159 if (tree_int_cst_equal (n_low, low)
3160 && tree_int_cst_equal (n_high, high))
3161 low = high = 0;
3162 else
3163 in_p = ! in_p;
3164 }
3165 else
3166 low = n_low, high = n_high;
3167
3168 exp = arg0;
3169 continue;
3170
3171 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3172 if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
3173 break;
3174
3175 if (! INTEGRAL_TYPE_P (type)
3176 || (low != 0 && ! int_fits_type_p (low, type))
3177 || (high != 0 && ! int_fits_type_p (high, type)))
3178 break;
3179
3180 n_low = low, n_high = high;
3181
3182 if (n_low != 0)
3183 n_low = convert (type, n_low);
3184
3185 if (n_high != 0)
3186 n_high = convert (type, n_high);
3187
3188 /* If we're converting from an unsigned to a signed type,
3189 we will be doing the comparison as unsigned. The tests above
3190 have already verified that LOW and HIGH are both positive.
3191
3192 So we have to make sure that the original unsigned value will
3193 be interpreted as positive. */
3194 if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
3195 {
3196 tree equiv_type = (*lang_hooks.types.type_for_mode)
3197 (TYPE_MODE (type), 1);
3198 tree high_positive;
3199
3200 /* A range without an upper bound is, naturally, unbounded.
3201 Since convert would have cropped a very large value, use
3202 the max value for the destination type. */
3203 high_positive
3204 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3205 : TYPE_MAX_VALUE (type);
3206
3207 if (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (exp)))
3208 high_positive = fold (build (RSHIFT_EXPR, type,
3209 convert (type, high_positive),
3210 convert (type, integer_one_node)));
3211
3212 /* If the low bound is specified, "and" the range with the
3213 range for which the original unsigned value will be
3214 positive. */
3215 if (low != 0)
3216 {
3217 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3218 1, n_low, n_high,
3219 1, convert (type, integer_zero_node),
3220 high_positive))
3221 break;
3222
3223 in_p = (n_in_p == in_p);
3224 }
3225 else
3226 {
3227 /* Otherwise, "or" the range with the range of the input
3228 that will be interpreted as negative. */
3229 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3230 0, n_low, n_high,
3231 1, convert (type, integer_zero_node),
3232 high_positive))
3233 break;
3234
3235 in_p = (in_p != n_in_p);
3236 }
3237 }
3238
3239 exp = arg0;
3240 low = n_low, high = n_high;
3241 continue;
3242
3243 default:
3244 break;
3245 }
3246
3247 break;
3248 }
3249
3250 /* If EXP is a constant, we can evaluate whether this is true or false. */
3251 if (TREE_CODE (exp) == INTEGER_CST)
3252 {
3253 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3254 exp, 0, low, 0))
3255 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3256 exp, 1, high, 1)));
3257 low = high = 0;
3258 exp = 0;
3259 }
3260
3261 *pin_p = in_p, *plow = low, *phigh = high;
3262 return exp;
3263 }
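
/* Editor's illustrative sketch (not part of fold-const.c): the
   BIT_NOT_EXPR case in make_range rests on the two's complement
   identity ~x == -x - 1, which turns a range test on ~X into ordinary
   arithmetic on X.  The function below always returns 1 on two's
   complement hardware.  */

static int
bit_not_identity_holds (int x)
{
  return ~x == -x - 1;
}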
3264 \f
3265 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3266 type, TYPE, return an expression to test if EXP is in (or out of, depending
3267 on IN_P) the range. */
3268
3269 static tree
3270 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3271 {
3272 tree etype = TREE_TYPE (exp);
3273 tree value;
3274
3275 if (! in_p
3276 && (0 != (value = build_range_check (type, exp, 1, low, high))))
3277 return invert_truthvalue (value);
3278
3279 if (low == 0 && high == 0)
3280 return convert (type, integer_one_node);
3281
3282 if (low == 0)
3283 return fold (build (LE_EXPR, type, exp, high));
3284
3285 if (high == 0)
3286 return fold (build (GE_EXPR, type, exp, low));
3287
3288 if (operand_equal_p (low, high, 0))
3289 return fold (build (EQ_EXPR, type, exp, low));
3290
3291 if (integer_zerop (low))
3292 {
3293 if (! TREE_UNSIGNED (etype))
3294 {
3295 etype = (*lang_hooks.types.unsigned_type) (etype);
3296 high = convert (etype, high);
3297 exp = convert (etype, exp);
3298 }
3299 return build_range_check (type, exp, 1, 0, high);
3300 }
3301
3302 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3303 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3304 {
3305 unsigned HOST_WIDE_INT lo;
3306 HOST_WIDE_INT hi;
3307 int prec;
3308
3309 prec = TYPE_PRECISION (etype);
3310 if (prec <= HOST_BITS_PER_WIDE_INT)
3311 {
3312 hi = 0;
3313 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3314 }
3315 else
3316 {
3317 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3318 lo = (unsigned HOST_WIDE_INT) -1;
3319 }
3320
3321 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3322 {
3323 if (TREE_UNSIGNED (etype))
3324 {
3325 etype = (*lang_hooks.types.signed_type) (etype);
3326 exp = convert (etype, exp);
3327 }
3328 return fold (build (GT_EXPR, type, exp,
3329 convert (etype, integer_zero_node)));
3330 }
3331 }
3332
3333 if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3334 && ! TREE_OVERFLOW (value))
3335 return build_range_check (type,
3336 fold (build (MINUS_EXPR, etype, exp, low)),
3337 1, convert (etype, integer_zero_node), value);
3338
3339 return 0;
3340 }
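
/* Editor's illustrative sketch (not part of fold-const.c): the
   source-level effect of build_range_check.  A two-ended range test
   is reduced to one unsigned comparison by subtracting the low bound;
   values below the range wrap around to large unsigned numbers and
   fail the single test.  */

static int
is_digit_example (int ch)
{
  /* Equivalent to "ch >= '0' && ch <= '9'".  */
  return (unsigned int) (ch - '0') <= 9u;
}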
3341 \f
3342 /* Given two ranges, see if we can merge them into one. Return 1 if we
3343 can, 0 if we can't. Set the output range into the specified parameters. */
3344
3345 static int
3346 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3347 tree high0, int in1_p, tree low1, tree high1)
3348 {
3349 int no_overlap;
3350 int subset;
3351 int temp;
3352 tree tem;
3353 int in_p;
3354 tree low, high;
3355 int lowequal = ((low0 == 0 && low1 == 0)
3356 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3357 low0, 0, low1, 0)));
3358 int highequal = ((high0 == 0 && high1 == 0)
3359 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3360 high0, 1, high1, 1)));
3361
3362 /* Make range 0 be the range that starts first, or ends last if they
3363 start at the same value. Swap them if that isn't the case. */
3364 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3365 low0, 0, low1, 0))
3366 || (lowequal
3367 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3368 high1, 1, high0, 1))))
3369 {
3370 temp = in0_p, in0_p = in1_p, in1_p = temp;
3371 tem = low0, low0 = low1, low1 = tem;
3372 tem = high0, high0 = high1, high1 = tem;
3373 }
3374
3375 /* Now flag two cases, whether the ranges are disjoint or whether the
3376 second range is totally subsumed in the first. Note that the tests
3377 below are simplified by the ones above. */
3378 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3379 high0, 1, low1, 0));
3380 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3381 high1, 1, high0, 1));
3382
3383 /* We now have four cases, depending on whether we are including or
3384 excluding the two ranges. */
3385 if (in0_p && in1_p)
3386 {
3387 /* If they don't overlap, the result is false. If the second range
3388 is a subset it is the result. Otherwise, the range is from the start
3389 of the second to the end of the first. */
3390 if (no_overlap)
3391 in_p = 0, low = high = 0;
3392 else if (subset)
3393 in_p = 1, low = low1, high = high1;
3394 else
3395 in_p = 1, low = low1, high = high0;
3396 }
3397
3398 else if (in0_p && ! in1_p)
3399 {
3400 /* If they don't overlap, the result is the first range. If they are
3401 equal, the result is false. If the second range is a subset of the
3402 first, and the ranges begin at the same place, we go from just after
3403 the end of the first range to the end of the second. If the second
3404 range is not a subset of the first, or if it is a subset and both
3405 ranges end at the same place, the range starts at the start of the
3406 first range and ends just before the second range.
3407 Otherwise, we can't describe this as a single range. */
3408 if (no_overlap)
3409 in_p = 1, low = low0, high = high0;
3410 else if (lowequal && highequal)
3411 in_p = 0, low = high = 0;
3412 else if (subset && lowequal)
3413 {
3414 in_p = 1, high = high0;
3415 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3416 integer_one_node, 0);
3417 }
3418 else if (! subset || highequal)
3419 {
3420 in_p = 1, low = low0;
3421 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3422 integer_one_node, 0);
3423 }
3424 else
3425 return 0;
3426 }
3427
3428 else if (! in0_p && in1_p)
3429 {
3430 /* If they don't overlap, the result is the second range. If the second
3431 is a subset of the first, the result is false. Otherwise,
3432 the range starts just after the first range and ends at the
3433 end of the second. */
3434 if (no_overlap)
3435 in_p = 1, low = low1, high = high1;
3436 else if (subset || highequal)
3437 in_p = 0, low = high = 0;
3438 else
3439 {
3440 in_p = 1, high = high1;
3441 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3442 integer_one_node, 0);
3443 }
3444 }
3445
3446 else
3447 {
3448 /* The case where we are excluding both ranges. Here the complex case
3449 is if they don't overlap. In that case, the only time we have a
3450 range is if they are adjacent. If the second is a subset of the
3451 first, the result is the first. Otherwise, the range to exclude
3452 starts at the beginning of the first range and ends at the end of the
3453 second. */
3454 if (no_overlap)
3455 {
3456 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3457 range_binop (PLUS_EXPR, NULL_TREE,
3458 high0, 1,
3459 integer_one_node, 1),
3460 1, low1, 0)))
3461 in_p = 0, low = low0, high = high1;
3462 else
3463 return 0;
3464 }
3465 else if (subset)
3466 in_p = 0, low = low0, high = high0;
3467 else
3468 in_p = 0, low = low0, high = high1;
3469 }
3470
3471 *pin_p = in_p, *plow = low, *phigh = high;
3472 return 1;
3473 }
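
/* Editor's illustrative sketch (not part of fold-const.c): the
   "in0_p && in1_p" case above at the value level.  Intersecting
   + [2, -] with + [-, 5] yields + [2, 5], so the two tests below
   agree for every X and the function always returns 1.  */

static int
merged_range_example (int x)
{
  int two_tests = (x >= 2 && x <= 5);            /* + [2, -] and + [-, 5] */
  int one_range = ((unsigned int) x - 2u <= 3u); /* + [2, 5] */
  return two_tests == one_range;
}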
3474 \f
3475 #ifndef RANGE_TEST_NON_SHORT_CIRCUIT
3476 #define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
3477 #endif
3478
3479 /* EXP is some logical combination of boolean tests. See if we can
3480 merge it into some range test. Return the new tree if so. */
3481
3482 static tree
3483 fold_range_test (tree exp)
3484 {
3485 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3486 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3487 int in0_p, in1_p, in_p;
3488 tree low0, low1, low, high0, high1, high;
3489 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3490 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3491 tree tem;
3492
3493 /* If this is an OR operation, invert both sides; we will invert
3494 again at the end. */
3495 if (or_op)
3496 in0_p = ! in0_p, in1_p = ! in1_p;
3497
3498 /* If both expressions are the same, if we can merge the ranges, and we
3499 can build the range test, return it or its inversion. If one of the
3500 ranges is always true or always false, consider it to be the same
3501 expression as the other. */
3502 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3503 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3504 in1_p, low1, high1)
3505 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3506 lhs != 0 ? lhs
3507 : rhs != 0 ? rhs : integer_zero_node,
3508 in_p, low, high))))
3509 return or_op ? invert_truthvalue (tem) : tem;
3510
3511 /* On machines where the branch cost is expensive, if this is a
3512 short-circuited branch and the underlying object on both sides
3513 is the same, make a non-short-circuit operation. */
3514 else if (RANGE_TEST_NON_SHORT_CIRCUIT
3515 && lhs != 0 && rhs != 0
3516 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3517 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3518 && operand_equal_p (lhs, rhs, 0))
3519 {
3520 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3521 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
3522 which cases we can't do this. */
3523 if (simple_operand_p (lhs))
3524 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3525 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3526 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3527 TREE_OPERAND (exp, 1));
3528
3529 else if ((*lang_hooks.decls.global_bindings_p) () == 0
3530 && ! CONTAINS_PLACEHOLDER_P (lhs))
3531 {
3532 tree common = save_expr (lhs);
3533
3534 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3535 or_op ? ! in0_p : in0_p,
3536 low0, high0))
3537 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3538 or_op ? ! in1_p : in1_p,
3539 low1, high1))))
3540 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3541 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3542 TREE_TYPE (exp), lhs, rhs);
3543 }
3544 }
3545
3546 return 0;
3547 }
3548 \f
3549 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a
3550 P-bit value. Arrange things so the extra bits will be set to zero if
3551 and only if C is sign-extended to its full width. If MASK is nonzero,
3552 it is an INTEGER_CST that should be AND'ed with the extra bits. */
3553
3554 static tree
3555 unextend (tree c, int p, int unsignedp, tree mask)
3556 {
3557 tree type = TREE_TYPE (c);
3558 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3559 tree temp;
3560
3561 if (p == modesize || unsignedp)
3562 return c;
3563
3564 /* We work by getting just the sign bit into the low-order bit, then
3565 into the high-order bit, then sign-extend. We then XOR that value
3566 with C. */
3567 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3568 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3569
3570 /* We must use a signed type in order to get an arithmetic right shift.
3571 However, we must also avoid introducing accidental overflows, so that
3572 a subsequent call to integer_zerop will work. Hence we must
3573 do the type conversion here. At this point, the constant is either
3574 zero or one, and the conversion to a signed type can never overflow.
3575 We could get an overflow if this conversion is done anywhere else. */
3576 if (TREE_UNSIGNED (type))
3577 temp = convert ((*lang_hooks.types.signed_type) (type), temp);
3578
3579 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3580 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3581 if (mask != 0)
3582 temp = const_binop (BIT_AND_EXPR, temp, convert (TREE_TYPE (c), mask), 0);
3583 /* If necessary, convert the type back to match the type of C. */
3584 if (TREE_UNSIGNED (type))
3585 temp = convert (type, temp);
3586
3587 return convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
3588 }
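
/* A worked example of the trick above (ignoring the optional MASK step),
   assuming P == 4 in an 8-bit mode: the first two const_binops isolate
   the field's sign bit (bit 3) of C, the left shift moves it to bit 7,
   and the arithmetic right shift smears it into bits 7..4, so TEMP is
   0b11110000 (or 0 if the sign bit was clear).  XORing C with TEMP gives
   0b00001010 when C was stored sign-extended (0b11111010) but 0b11111010
   when it was stored zero-extended (0b00001010): the extra bits are zero
   exactly when C was sign-extended, as promised.  */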
3589 \f
3590 /* Find ways of folding logical expressions of LHS and RHS:
3591 Try to merge two comparisons to the same innermost item.
3592 Look for range tests like "ch >= '0' && ch <= '9'".
3593 Look for combinations of simple terms on machines with expensive branches
3594 and evaluate the RHS unconditionally.
3595
3596 For example, if we have p->a == 2 && p->b == 4 and we can make an
3597 object large enough to span both A and B, we can do this with a comparison
3598 against the object ANDed with a mask.
3599
3600 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3601 operations to do this with one comparison.
3602
3603 We check for both normal comparisons and the BIT_AND_EXPRs made by this
3604 function and the one above.
3605
3606 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3607 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3608
3609 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
3610 two operands.
3611
3612 We return the simplified tree or 0 if no optimization is possible. */
3613
3614 static tree
3615 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
3616 {
3617 /* If this is the "or" of two comparisons, we can do something if
3618 the comparisons are NE_EXPR. If this is the "and", we can do something
3619 if the comparisons are EQ_EXPR. I.e.,
3620 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3621
3622 WANTED_CODE is this operation code. For single bit fields, we can
3623 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3624 comparison for one-bit fields. */
3625
3626 enum tree_code wanted_code;
3627 enum tree_code lcode, rcode;
3628 tree ll_arg, lr_arg, rl_arg, rr_arg;
3629 tree ll_inner, lr_inner, rl_inner, rr_inner;
3630 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3631 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3632 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3633 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3634 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3635 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3636 enum machine_mode lnmode, rnmode;
3637 tree ll_mask, lr_mask, rl_mask, rr_mask;
3638 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3639 tree l_const, r_const;
3640 tree lntype, rntype, result;
3641 int first_bit, end_bit;
3642 int volatilep;
3643
3644 /* Start by getting the comparison codes. Fail if anything is volatile.
3645 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3646 it were surrounded with a NE_EXPR. */
3647
3648 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3649 return 0;
3650
3651 lcode = TREE_CODE (lhs);
3652 rcode = TREE_CODE (rhs);
3653
3654 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3655 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3656
3657 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3658 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3659
3660 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3661 return 0;
3662
3663 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3664 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3665
3666 ll_arg = TREE_OPERAND (lhs, 0);
3667 lr_arg = TREE_OPERAND (lhs, 1);
3668 rl_arg = TREE_OPERAND (rhs, 0);
3669 rr_arg = TREE_OPERAND (rhs, 1);
3670
3671 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
3672 if (simple_operand_p (ll_arg)
3673 && simple_operand_p (lr_arg)
3674 && !FLOAT_TYPE_P (TREE_TYPE (ll_arg)))
3675 {
3676 int compcode;
3677
3678 if (operand_equal_p (ll_arg, rl_arg, 0)
3679 && operand_equal_p (lr_arg, rr_arg, 0))
3680 {
3681 int lcompcode, rcompcode;
3682
3683 lcompcode = comparison_to_compcode (lcode);
3684 rcompcode = comparison_to_compcode (rcode);
3685 compcode = (code == TRUTH_AND_EXPR)
3686 ? lcompcode & rcompcode
3687 : lcompcode | rcompcode;
3688 }
3689 else if (operand_equal_p (ll_arg, rr_arg, 0)
3690 && operand_equal_p (lr_arg, rl_arg, 0))
3691 {
3692 int lcompcode, rcompcode;
3693
3694 rcode = swap_tree_comparison (rcode);
3695 lcompcode = comparison_to_compcode (lcode);
3696 rcompcode = comparison_to_compcode (rcode);
3697 compcode = (code == TRUTH_AND_EXPR)
3698 ? lcompcode & rcompcode
3699 : lcompcode | rcompcode;
3700 }
3701 else
3702 compcode = -1;
3703
3704 if (compcode == COMPCODE_TRUE)
3705 return convert (truth_type, integer_one_node);
3706 else if (compcode == COMPCODE_FALSE)
3707 return convert (truth_type, integer_zero_node);
3708 else if (compcode != -1)
3709 return build (compcode_to_comparison (compcode),
3710 truth_type, ll_arg, lr_arg);
3711 }
3712
3713 /* If the RHS can be evaluated unconditionally and its operands are
3714 simple, it wins to evaluate the RHS unconditionally on machines
3715 with expensive branches. In this case, this isn't a comparison
3716 that can be merged. Avoid doing this if the RHS is a floating-point
3717 comparison since those can trap. */
3718
3719 if (BRANCH_COST >= 2
3720 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
3721 && simple_operand_p (rl_arg)
3722 && simple_operand_p (rr_arg))
3723 {
3724 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
3725 if (code == TRUTH_OR_EXPR
3726 && lcode == NE_EXPR && integer_zerop (lr_arg)
3727 && rcode == NE_EXPR && integer_zerop (rr_arg)
3728 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3729 return build (NE_EXPR, truth_type,
3730 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3731 ll_arg, rl_arg),
3732 integer_zero_node);
3733
3734 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
3735 if (code == TRUTH_AND_EXPR
3736 && lcode == EQ_EXPR && integer_zerop (lr_arg)
3737 && rcode == EQ_EXPR && integer_zerop (rr_arg)
3738 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3739 return build (EQ_EXPR, truth_type,
3740 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3741 ll_arg, rl_arg),
3742 integer_zero_node);
3743
3744 return build (code, truth_type, lhs, rhs);
3745 }
3746
3747 /* See if the comparisons can be merged. Then get all the parameters for
3748 each side. */
3749
3750 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
3751 || (rcode != EQ_EXPR && rcode != NE_EXPR))
3752 return 0;
3753
3754 volatilep = 0;
3755 ll_inner = decode_field_reference (ll_arg,
3756 &ll_bitsize, &ll_bitpos, &ll_mode,
3757 &ll_unsignedp, &volatilep, &ll_mask,
3758 &ll_and_mask);
3759 lr_inner = decode_field_reference (lr_arg,
3760 &lr_bitsize, &lr_bitpos, &lr_mode,
3761 &lr_unsignedp, &volatilep, &lr_mask,
3762 &lr_and_mask);
3763 rl_inner = decode_field_reference (rl_arg,
3764 &rl_bitsize, &rl_bitpos, &rl_mode,
3765 &rl_unsignedp, &volatilep, &rl_mask,
3766 &rl_and_mask);
3767 rr_inner = decode_field_reference (rr_arg,
3768 &rr_bitsize, &rr_bitpos, &rr_mode,
3769 &rr_unsignedp, &volatilep, &rr_mask,
3770 &rr_and_mask);
3771
3772 /* The inner operation on the lhs of each comparison must be the
3773 same if we are to be able to do anything.
3774 Then see if we have constants. If not, the same must be true for
3775 the rhs's. */
3776 if (volatilep || ll_inner == 0 || rl_inner == 0
3777 || ! operand_equal_p (ll_inner, rl_inner, 0))
3778 return 0;
3779
3780 if (TREE_CODE (lr_arg) == INTEGER_CST
3781 && TREE_CODE (rr_arg) == INTEGER_CST)
3782 l_const = lr_arg, r_const = rr_arg;
3783 else if (lr_inner == 0 || rr_inner == 0
3784 || ! operand_equal_p (lr_inner, rr_inner, 0))
3785 return 0;
3786 else
3787 l_const = r_const = 0;
3788
3789 /* If either comparison code is not correct for our logical operation,
3790 fail. However, we can convert a one-bit comparison against zero into
3791 the opposite comparison against that bit being set in the field. */
3792
3793 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
3794 if (lcode != wanted_code)
3795 {
3796 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
3797 {
3798 /* Make the left operand unsigned, since we are only interested
3799 in the value of one bit. Otherwise we are doing the wrong
3800 thing below. */
3801 ll_unsignedp = 1;
3802 l_const = ll_mask;
3803 }
3804 else
3805 return 0;
3806 }
3807
3808 /* This is analogous to the code for l_const above. */
3809 if (rcode != wanted_code)
3810 {
3811 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
3812 {
3813 rl_unsignedp = 1;
3814 r_const = rl_mask;
3815 }
3816 else
3817 return 0;
3818 }
3819
3820 /* After this point all optimizations will generate bit-field
3821 references, which we might not want. */
3822 if (! (*lang_hooks.can_use_bit_fields_p) ())
3823 return 0;
3824
3825 /* See if we can find a mode that contains both fields being compared on
3826 the left. If we can't, fail. Otherwise, update all constants and masks
3827 to be relative to a field of that size. */
3828 first_bit = MIN (ll_bitpos, rl_bitpos);
3829 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
3830 lnmode = get_best_mode (end_bit - first_bit, first_bit,
3831 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
3832 volatilep);
3833 if (lnmode == VOIDmode)
3834 return 0;
3835
3836 lnbitsize = GET_MODE_BITSIZE (lnmode);
3837 lnbitpos = first_bit & ~ (lnbitsize - 1);
3838 lntype = (*lang_hooks.types.type_for_size) (lnbitsize, 1);
3839 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
3840
3841 if (BYTES_BIG_ENDIAN)
3842 {
3843 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
3844 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
3845 }
3846
3847 ll_mask = const_binop (LSHIFT_EXPR, convert (lntype, ll_mask),
3848 size_int (xll_bitpos), 0);
3849 rl_mask = const_binop (LSHIFT_EXPR, convert (lntype, rl_mask),
3850 size_int (xrl_bitpos), 0);
3851
3852 if (l_const)
3853 {
3854 l_const = convert (lntype, l_const);
3855 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
3856 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
3857 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
3858 fold (build1 (BIT_NOT_EXPR,
3859 lntype, ll_mask)),
3860 0)))
3861 {
3862 warning ("comparison is always %d", wanted_code == NE_EXPR);
3863
3864 return convert (truth_type,
3865 wanted_code == NE_EXPR
3866 ? integer_one_node : integer_zero_node);
3867 }
3868 }
3869 if (r_const)
3870 {
3871 r_const = convert (lntype, r_const);
3872 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
3873 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
3874 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
3875 fold (build1 (BIT_NOT_EXPR,
3876 lntype, rl_mask)),
3877 0)))
3878 {
3879 warning ("comparison is always %d", wanted_code == NE_EXPR);
3880
3881 return convert (truth_type,
3882 wanted_code == NE_EXPR
3883 ? integer_one_node : integer_zero_node);
3884 }
3885 }
3886
3887 /* If the right sides are not constant, do the same for them. Also,
3888 disallow this optimization if a size or signedness mismatch occurs
3889 between the left and right sides. */
3890 if (l_const == 0)
3891 {
3892 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
3893 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
3894 /* Make sure the two fields on the right
3895 correspond to the left without being swapped. */
3896 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
3897 return 0;
3898
3899 first_bit = MIN (lr_bitpos, rr_bitpos);
3900 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
3901 rnmode = get_best_mode (end_bit - first_bit, first_bit,
3902 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
3903 volatilep);
3904 if (rnmode == VOIDmode)
3905 return 0;
3906
3907 rnbitsize = GET_MODE_BITSIZE (rnmode);
3908 rnbitpos = first_bit & ~ (rnbitsize - 1);
3909 rntype = (*lang_hooks.types.type_for_size) (rnbitsize, 1);
3910 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
3911
3912 if (BYTES_BIG_ENDIAN)
3913 {
3914 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
3915 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
3916 }
3917
3918 lr_mask = const_binop (LSHIFT_EXPR, convert (rntype, lr_mask),
3919 size_int (xlr_bitpos), 0);
3920 rr_mask = const_binop (LSHIFT_EXPR, convert (rntype, rr_mask),
3921 size_int (xrr_bitpos), 0);
3922
3923 /* Make a mask that corresponds to both fields being compared.
3924 Do this for both items being compared. If the operands are the
3925 same size and the bits being compared are in the same position
3926 then we can do this by masking both and comparing the masked
3927 results. */
3928 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3929 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
3930 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
3931 {
3932 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
3933 ll_unsignedp || rl_unsignedp);
3934 if (! all_ones_mask_p (ll_mask, lnbitsize))
3935 lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
3936
3937 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
3938 lr_unsignedp || rr_unsignedp);
3939 if (! all_ones_mask_p (lr_mask, rnbitsize))
3940 rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
3941
3942 return build (wanted_code, truth_type, lhs, rhs);
3943 }
3944
3945 /* There is still another way we can do something: If both pairs of
3946 fields being compared are adjacent, we may be able to make a wider
3947 field containing them both.
3948
3949 Note that we still must mask the lhs/rhs expressions. Furthermore,
3950 the mask must be shifted to account for the shift done by
3951 make_bit_field_ref. */
3952 if ((ll_bitsize + ll_bitpos == rl_bitpos
3953 && lr_bitsize + lr_bitpos == rr_bitpos)
3954 || (ll_bitpos == rl_bitpos + rl_bitsize
3955 && lr_bitpos == rr_bitpos + rr_bitsize))
3956 {
3957 tree type;
3958
3959 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
3960 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
3961 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
3962 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
3963
3964 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
3965 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
3966 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
3967 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
3968
3969 /* Convert to the smaller type before masking out unwanted bits. */
3970 type = lntype;
3971 if (lntype != rntype)
3972 {
3973 if (lnbitsize > rnbitsize)
3974 {
3975 lhs = convert (rntype, lhs);
3976 ll_mask = convert (rntype, ll_mask);
3977 type = rntype;
3978 }
3979 else if (lnbitsize < rnbitsize)
3980 {
3981 rhs = convert (lntype, rhs);
3982 lr_mask = convert (lntype, lr_mask);
3983 type = lntype;
3984 }
3985 }
3986
3987 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
3988 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
3989
3990 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
3991 rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
3992
3993 return build (wanted_code, truth_type, lhs, rhs);
3994 }
3995
3996 return 0;
3997 }
3998
3999 /* Handle the case of comparisons with constants. If there is something in
4000 common between the masks, those bits of the constants must be the same.
4001 If not, the condition is always false. Test for this to avoid generating
4002 incorrect code below. */
4003 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4004 if (! integer_zerop (result)
4005 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4006 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4007 {
4008 if (wanted_code == NE_EXPR)
4009 {
4010 warning ("`or' of unmatched not-equal tests is always 1");
4011 return convert (truth_type, integer_one_node);
4012 }
4013 else
4014 {
4015 warning ("`and' of mutually exclusive equal-tests is always 0");
4016 return convert (truth_type, integer_zero_node);
4017 }
4018 }
4019
4020 /* Construct the expression we will return. First get the component
4021 reference we will make. Unless the mask is all ones the width of
4022 that field, perform the mask operation. Then compare with the
4023 merged constant. */
4024 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4025 ll_unsignedp || rl_unsignedp);
4026
4027 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4028 if (! all_ones_mask_p (ll_mask, lnbitsize))
4029 result = build (BIT_AND_EXPR, lntype, result, ll_mask);
4030
4031 return build (wanted_code, truth_type, result,
4032 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4033 }
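
/* An illustrative sketch of the bit-field merging above (the layout is
   hypothetical and depends on the target): given

     struct s { unsigned a : 8; unsigned b : 8; } *p;
     ... p->a == 2 && p->b == 4 ...

   both fields fit in one 16-bit word, so the two comparisons become a
   single masked comparison of that word against the merged constant:
   one load and one branch instead of two of each.  */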
4034 \f
4035 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4036 constant. */
4037
4038 static tree
4039 optimize_minmax_comparison (tree t)
4040 {
4041 tree type = TREE_TYPE (t);
4042 tree arg0 = TREE_OPERAND (t, 0);
4043 enum tree_code op_code;
4044 tree comp_const = TREE_OPERAND (t, 1);
4045 tree minmax_const;
4046 int consts_equal, consts_lt;
4047 tree inner;
4048
4049 STRIP_SIGN_NOPS (arg0);
4050
4051 op_code = TREE_CODE (arg0);
4052 minmax_const = TREE_OPERAND (arg0, 1);
4053 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4054 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4055 inner = TREE_OPERAND (arg0, 0);
4056
4057 /* If something does not permit us to optimize, return the original tree. */
4058 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4059 || TREE_CODE (comp_const) != INTEGER_CST
4060 || TREE_CONSTANT_OVERFLOW (comp_const)
4061 || TREE_CODE (minmax_const) != INTEGER_CST
4062 || TREE_CONSTANT_OVERFLOW (minmax_const))
4063 return t;
4064
4065 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4066 and GT_EXPR, doing the rest with recursive calls using logical
4067 simplifications. */
4068 switch (TREE_CODE (t))
4069 {
4070 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4071 return
4072 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4073
4074 case GE_EXPR:
4075 return
4076 fold (build (TRUTH_ORIF_EXPR, type,
4077 optimize_minmax_comparison
4078 (build (EQ_EXPR, type, arg0, comp_const)),
4079 optimize_minmax_comparison
4080 (build (GT_EXPR, type, arg0, comp_const))));
4081
4082 case EQ_EXPR:
4083 if (op_code == MAX_EXPR && consts_equal)
4084 /* MAX (X, 0) == 0 -> X <= 0 */
4085 return fold (build (LE_EXPR, type, inner, comp_const));
4086
4087 else if (op_code == MAX_EXPR && consts_lt)
4088 /* MAX (X, 0) == 5 -> X == 5 */
4089 return fold (build (EQ_EXPR, type, inner, comp_const));
4090
4091 else if (op_code == MAX_EXPR)
4092 /* MAX (X, 0) == -1 -> false */
4093 return omit_one_operand (type, integer_zero_node, inner);
4094
4095 else if (consts_equal)
4096 /* MIN (X, 0) == 0 -> X >= 0 */
4097 return fold (build (GE_EXPR, type, inner, comp_const));
4098
4099 else if (consts_lt)
4100 /* MIN (X, 0) == 5 -> false */
4101 return omit_one_operand (type, integer_zero_node, inner);
4102
4103 else
4104 /* MIN (X, 0) == -1 -> X == -1 */
4105 return fold (build (EQ_EXPR, type, inner, comp_const));
4106
4107 case GT_EXPR:
4108 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4109 /* MAX (X, 0) > 0 -> X > 0
4110 MAX (X, 0) > 5 -> X > 5 */
4111 return fold (build (GT_EXPR, type, inner, comp_const));
4112
4113 else if (op_code == MAX_EXPR)
4114 /* MAX (X, 0) > -1 -> true */
4115 return omit_one_operand (type, integer_one_node, inner);
4116
4117 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4118 /* MIN (X, 0) > 0 -> false
4119 MIN (X, 0) > 5 -> false */
4120 return omit_one_operand (type, integer_zero_node, inner);
4121
4122 else
4123 /* MIN (X, 0) > -1 -> X > -1 */
4124 return fold (build (GT_EXPR, type, inner, comp_const));
4125
4126 default:
4127 return t;
4128 }
4129 }
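
/* A concrete trace of the recursion above: MIN (x, 0) != 5 is first
   inverted to MIN (x, 0) == 5; the EQ_EXPR case folds that to false,
   since MIN (x, 0) can never exceed 0; and inverting back yields the
   constant 1.  */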
4130 \f
4131 /* T is an integer expression that is being multiplied, divided, or taken
4132 modulo (CODE says which, and what kind of divide or modulus) by a
4133 constant C. See if we can eliminate that operation by folding it with
4134 other operations already in T. WIDE_TYPE, if non-null, is a type that
4135 should be used for the computation if wider than our type.
4136
4137 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4138 (X * 2) + (Y * 4). We must, however, be assured that either the original
4139 expression would not overflow or that overflow is undefined for the type
4140 in the language in question.
4141
4142 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4143 the machine has a multiply-accumulate insn or that this is part of an
4144 addressing calculation.
4145
4146 If we return a non-null expression, it is an equivalent form of the
4147 original computation, but need not be in the original type. */
4148
4149 static tree
4150 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
4151 {
4152 /* To avoid exponential search depth, refuse to allow recursion past
4153 three levels. Beyond that (1) it's highly unlikely that we'll find
4154 something interesting and (2) we've probably processed it before
4155 when we built the inner expression. */
4156
4157 static int depth;
4158 tree ret;
4159
4160 if (depth > 3)
4161 return NULL;
4162
4163 depth++;
4164 ret = extract_muldiv_1 (t, c, code, wide_type);
4165 depth--;
4166
4167 return ret;
4168 }
4169
4170 static tree
4171 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
4172 {
4173 tree type = TREE_TYPE (t);
4174 enum tree_code tcode = TREE_CODE (t);
4175 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
4176 > GET_MODE_SIZE (TYPE_MODE (type)))
4177 ? wide_type : type);
4178 tree t1, t2;
4179 int same_p = tcode == code;
4180 tree op0 = NULL_TREE, op1 = NULL_TREE;
4181
4182 /* Don't deal with constants of zero here; they confuse the code below. */
4183 if (integer_zerop (c))
4184 return NULL_TREE;
4185
4186 if (TREE_CODE_CLASS (tcode) == '1')
4187 op0 = TREE_OPERAND (t, 0);
4188
4189 if (TREE_CODE_CLASS (tcode) == '2')
4190 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
4191
4192 /* Note that we need not handle conditional operations here since fold
4193 already handles those cases. So just do arithmetic here. */
4194 switch (tcode)
4195 {
4196 case INTEGER_CST:
4197 /* For a constant, we can always simplify if we are a multiply
4198 or (for divide and modulus) if it is a multiple of our constant. */
4199 if (code == MULT_EXPR
4200 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
4201 return const_binop (code, convert (ctype, t), convert (ctype, c), 0);
4202 break;
4203
4204 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
4205 /* If op0 is an expression ... */
4206 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
4207 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
4208 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
4209 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
4210 /* ... and is unsigned, and its type is smaller than ctype,
4211 then we cannot pass through as widening. */
4212 && ((TREE_UNSIGNED (TREE_TYPE (op0))
4213 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4214 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
4215 && (GET_MODE_SIZE (TYPE_MODE (ctype))
4216 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
4217 /* ... or its type is larger than ctype,
4218 then we cannot pass through this truncation. */
4219 || (GET_MODE_SIZE (TYPE_MODE (ctype))
4220 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
4221 /* ... or signedness changes for division or modulus,
4222 then we cannot pass through this conversion. */
4223 || (code != MULT_EXPR
4224 && (TREE_UNSIGNED (ctype)
4225 != TREE_UNSIGNED (TREE_TYPE (op0))))))
4226 break;
4227
4228 /* Pass the constant down and see if we can make a simplification. If
4229 we can, replace this expression with the inner simplification for
4230 possible later conversion to our or some other type. */
4231 if ((t2 = convert (TREE_TYPE (op0), c)) != 0
4232 && TREE_CODE (t2) == INTEGER_CST
4233 && ! TREE_CONSTANT_OVERFLOW (t2)
4234 && (0 != (t1 = extract_muldiv (op0, t2, code,
4235 code == MULT_EXPR
4236 ? ctype : NULL_TREE))))
4237 return t1;
4238 break;
4239
4240 case NEGATE_EXPR: case ABS_EXPR:
4241 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4242 return fold (build1 (tcode, ctype, convert (ctype, t1)));
4243 break;
4244
4245 case MIN_EXPR: case MAX_EXPR:
4246 /* If widening the type changes the signedness, then we can't perform
4247 this optimization as that changes the result. */
4248 if (TREE_UNSIGNED (ctype) != TREE_UNSIGNED (type))
4249 break;
4250
4251 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
4252 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
4253 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
4254 {
4255 if (tree_int_cst_sgn (c) < 0)
4256 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
4257
4258 return fold (build (tcode, ctype, convert (ctype, t1),
4259 convert (ctype, t2)));
4260 }
4261 break;
4262
4263 case WITH_RECORD_EXPR:
4264 if ((t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code, wide_type)) != 0)
4265 return build (WITH_RECORD_EXPR, TREE_TYPE (t1), t1,
4266 TREE_OPERAND (t, 1));
4267 break;
4268
4269 case LSHIFT_EXPR: case RSHIFT_EXPR:
4270 /* If the second operand is constant, this is a multiplication
4271 or floor division by a power of two, so we can treat it that
4272 way unless the multiplier or divisor overflows. */
4273 if (TREE_CODE (op1) == INTEGER_CST
4274 /* const_binop may not detect overflow correctly,
4275 so check for it explicitly here. */
4276 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
4277 && TREE_INT_CST_HIGH (op1) == 0
4278 && 0 != (t1 = convert (ctype,
4279 const_binop (LSHIFT_EXPR, size_one_node,
4280 op1, 0)))
4281 && ! TREE_OVERFLOW (t1))
4282 return extract_muldiv (build (tcode == LSHIFT_EXPR
4283 ? MULT_EXPR : FLOOR_DIV_EXPR,
4284 ctype, convert (ctype, op0), t1),
4285 c, code, wide_type);
4286 break;
4287
4288 case PLUS_EXPR: case MINUS_EXPR:
4289 /* See if we can eliminate the operation on both sides. If we can, we
4290 can return a new PLUS or MINUS. If we can't, the only remaining
4291 cases where we can do anything are if the second operand is a
4292 constant. */
4293 t1 = extract_muldiv (op0, c, code, wide_type);
4294 t2 = extract_muldiv (op1, c, code, wide_type);
4295 if (t1 != 0 && t2 != 0
4296 && (code == MULT_EXPR
4297 /* If not multiplication, we can only do this if both operands
4298 are divisible by c. */
4299 || (multiple_of_p (ctype, op0, c)
4300 && multiple_of_p (ctype, op1, c))))
4301 return fold (build (tcode, ctype, convert (ctype, t1),
4302 convert (ctype, t2)));
4303
4304 /* If this was a subtraction, negate OP1 and set it to be an addition.
4305 This simplifies the logic below. */
4306 if (tcode == MINUS_EXPR)
4307 tcode = PLUS_EXPR, op1 = negate_expr (op1);
4308
4309 if (TREE_CODE (op1) != INTEGER_CST)
4310 break;
4311
4312 /* If either OP1 or C are negative, this optimization is not safe for
4313 some of the division and remainder types while for others we need
4314 to change the code. */
4315 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
4316 {
4317 if (code == CEIL_DIV_EXPR)
4318 code = FLOOR_DIV_EXPR;
4319 else if (code == FLOOR_DIV_EXPR)
4320 code = CEIL_DIV_EXPR;
4321 else if (code != MULT_EXPR
4322 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
4323 break;
4324 }
4325
4326 /* If it's a multiply or a division/modulus operation of a multiple
4327 of our constant, do the operation and verify it doesn't overflow. */
4328 if (code == MULT_EXPR
4329 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4330 {
4331 op1 = const_binop (code, convert (ctype, op1), convert (ctype, c), 0);
4332 if (op1 == 0 || TREE_OVERFLOW (op1))
4333 break;
4334 }
4335 else
4336 break;
4337
4338 /* If we have an unsigned type that is not a sizetype, we cannot widen
4339 the operation since it will change the result if the original
4340 computation overflowed. */
4341 if (TREE_UNSIGNED (ctype)
4342 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
4343 && ctype != type)
4344 break;
4345
4346 /* If we were able to eliminate our operation from the first side,
4347 apply our operation to the second side and reform the PLUS. */
4348 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4349 return fold (build (tcode, ctype, convert (ctype, t1), op1));
4350
4351 /* The last case is if we are a multiply. In that case, we can
4352 apply the distributive law to commute the multiply and addition
4353 if the multiplication of the constants doesn't overflow. */
4354 if (code == MULT_EXPR)
4355 return fold (build (tcode, ctype, fold (build (code, ctype,
4356 convert (ctype, op0),
4357 convert (ctype, c))),
4358 op1));
4359
4360 break;
4361
4362 case MULT_EXPR:
4363 /* We have a special case here if we are doing something like
4364 (C * 8) % 4 since we know that's zero. */
4365 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4366 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4367 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4368 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4369 return omit_one_operand (type, integer_zero_node, op0);
4370
4371 /* ... fall through ... */
4372
4373 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
4374 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
4375 /* If we can extract our operation from the LHS, do so and return a
4376 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
4377 do something only if the second operand is a constant. */
4378 if (same_p
4379 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4380 return fold (build (tcode, ctype, convert (ctype, t1),
4381 convert (ctype, op1)));
4382 else if (tcode == MULT_EXPR && code == MULT_EXPR
4383 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4384 return fold (build (tcode, ctype, convert (ctype, op0),
4385 convert (ctype, t1)));
4386 else if (TREE_CODE (op1) != INTEGER_CST)
4387 return 0;
4388
4389 /* If these are the same operation types, we can associate them
4390 assuming no overflow. */
4391 if (tcode == code
4392 && 0 != (t1 = const_binop (MULT_EXPR, convert (ctype, op1),
4393 convert (ctype, c), 0))
4394 && ! TREE_OVERFLOW (t1))
4395 return fold (build (tcode, ctype, convert (ctype, op0), t1));
4396
4397 /* If these operations "cancel" each other, we have the main
4398 optimizations of this pass, which occur when either constant is a
4399 multiple of the other, in which case we replace this with either an
4400 operation of CODE or TCODE.
4401
4402 If we have an unsigned type that is not a sizetype, we cannot do
4403 this since it will change the result if the original computation
4404 overflowed. */
4405 if ((! TREE_UNSIGNED (ctype)
4406 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
4407 && ! flag_wrapv
4408 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4409 || (tcode == MULT_EXPR
4410 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4411 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
4412 {
4413 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4414 return fold (build (tcode, ctype, convert (ctype, op0),
4415 convert (ctype,
4416 const_binop (TRUNC_DIV_EXPR,
4417 op1, c, 0))));
4418 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4419 return fold (build (code, ctype, convert (ctype, op0),
4420 convert (ctype,
4421 const_binop (TRUNC_DIV_EXPR,
4422 c, op1, 0))));
4423 }
4424 break;
4425
4426 default:
4427 break;
4428 }
4429
4430 return 0;
4431 }
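
/* A source-level illustration of the rewrites above (the variables are
   hypothetical, and overflow must be undefined or impossible for the
   transformations to be valid):

     (x * 8 + y * 16) / 4   becomes   x * 2 + y * 4
     (x + 7) * 4            becomes   x * 4 + 28

   The first eliminates the division entirely; the second exposes a
   multiply-accumulate or addressing-mode opportunity.  */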
4432 \f
4433 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4434 S, a SAVE_EXPR, return the expression actually being evaluated. Note
4435 that we may sometimes modify the tree. */
4436
4437 static tree
4438 strip_compound_expr (tree t, tree s)
4439 {
4440 enum tree_code code = TREE_CODE (t);
4441
4442 /* See if this is the COMPOUND_EXPR we want to eliminate. */
4443 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4444 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4445 return TREE_OPERAND (t, 1);
4446
4447 /* See if this is a COND_EXPR or a simple arithmetic operator. We
4448 don't bother handling any other types. */
4449 else if (code == COND_EXPR)
4450 {
4451 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4452 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4453 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
4454 }
4455 else if (TREE_CODE_CLASS (code) == '1')
4456 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4457 else if (TREE_CODE_CLASS (code) == '<'
4458 || TREE_CODE_CLASS (code) == '2')
4459 {
4460 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4461 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4462 }
4463
4464 return t;
4465 }
4466 \f
4467 /* Return a node which has the indicated constant VALUE (either 0 or
4468 1), and is of the indicated TYPE. */
4469
4470 static tree
4471 constant_boolean_node (int value, tree type)
4472 {
4473 if (type == integer_type_node)
4474 return value ? integer_one_node : integer_zero_node;
4475 else if (TREE_CODE (type) == BOOLEAN_TYPE)
4476 return (*lang_hooks.truthvalue_conversion) (value ? integer_one_node :
4477 integer_zero_node);
4478 else
4479 {
4480 tree t = build_int_2 (value, 0);
4481
4482 TREE_TYPE (t) = type;
4483 return t;
4484 }
4485 }
4486
4487 /* Utility function for the following routine, to see how complex a nesting of
4488 COND_EXPRs can be. EXPR is the expression and LIM is a count beyond which
4489 we don't care (to avoid spending too much time on complex expressions). */
4490
4491 static int
4492 count_cond (tree expr, int lim)
4493 {
4494 int ctrue, cfalse;
4495
4496 if (TREE_CODE (expr) != COND_EXPR)
4497 return 0;
4498 else if (lim <= 0)
4499 return 0;
4500
4501 ctrue = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4502 cfalse = count_cond (TREE_OPERAND (expr, 2), lim - 1 - ctrue);
4503 return MIN (lim, 1 + ctrue + cfalse);
4504 }
4505
4506 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
4507 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
4508 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
4509 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
4510 COND is the first argument to CODE; otherwise (as in the example
4511 given here), it is the second argument. TYPE is the type of the
4512 original expression. */
4513
4514 static tree
4515 fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
4516 tree cond, tree arg, int cond_first_p)
4517 {
4518 tree test, true_value, false_value;
4519 tree lhs = NULL_TREE;
4520 tree rhs = NULL_TREE;
4521 /* In the end, we'll produce a COND_EXPR. Both arms of the
4522 conditional expression will be binary operations. The left-hand
4523 side of the expression to be executed if the condition is true
4524 will be pointed to by TRUE_LHS. Similarly, the right-hand side
4525 of the expression to be executed if the condition is true will be
4526 pointed to by TRUE_RHS. FALSE_LHS and FALSE_RHS are analogous --
4527 but apply to the expression to be executed if the conditional is
4528 false. */
4529 tree *true_lhs;
4530 tree *true_rhs;
4531 tree *false_lhs;
4532 tree *false_rhs;
4533 /* These are the codes to use for the left-hand side and right-hand
4534 side of the COND_EXPR. Normally, they are the same as CODE. */
4535 enum tree_code lhs_code = code;
4536 enum tree_code rhs_code = code;
4537 /* And these are the types of the expressions. */
4538 tree lhs_type = type;
4539 tree rhs_type = type;
4540 int save = 0;
4541
4542 if (cond_first_p)
4543 {
4544 true_rhs = false_rhs = &arg;
4545 true_lhs = &true_value;
4546 false_lhs = &false_value;
4547 }
4548 else
4549 {
4550 true_lhs = false_lhs = &arg;
4551 true_rhs = &true_value;
4552 false_rhs = &false_value;
4553 }
4554
4555 if (TREE_CODE (cond) == COND_EXPR)
4556 {
4557 test = TREE_OPERAND (cond, 0);
4558 true_value = TREE_OPERAND (cond, 1);
4559 false_value = TREE_OPERAND (cond, 2);
4560 /* If this operand is a void expression (such as a throw), it makes no
4561 sense to try to perform a logical or arithmetic operation
4562 involving it. Instead of building `a + throw 3' for example,
4563 we simply build `a, throw 3'. */
4564 if (VOID_TYPE_P (TREE_TYPE (true_value)))
4565 {
4566 if (! cond_first_p)
4567 {
4568 lhs_code = COMPOUND_EXPR;
4569 lhs_type = void_type_node;
4570 }
4571 else
4572 lhs = true_value;
4573 }
4574 if (VOID_TYPE_P (TREE_TYPE (false_value)))
4575 {
4576 if (! cond_first_p)
4577 {
4578 rhs_code = COMPOUND_EXPR;
4579 rhs_type = void_type_node;
4580 }
4581 else
4582 rhs = false_value;
4583 }
4584 }
4585 else
4586 {
4587 tree testtype = TREE_TYPE (cond);
4588 test = cond;
4589 true_value = convert (testtype, integer_one_node);
4590 false_value = convert (testtype, integer_zero_node);
4591 }
4592
4593 /* If ARG is complex we want to make sure we only evaluate it once. Though
4594 this is only required if it is volatile, it might be more efficient even
4595 if it is not. However, if we succeed in folding one part to a constant,
4596 we do not need to make this SAVE_EXPR. Since we do this optimization
4597 primarily to see if we do end up with a constant and this SAVE_EXPR
4598 interferes with later optimizations, suppressing it when we can is
4599 important.
4600
4601 If we are not in a function, we can't make a SAVE_EXPR, so don't try to
4602 do so. Don't try to see if the result is a constant if an arm is a
4603 COND_EXPR since we get exponential behavior in that case. */
4604
4605 if (saved_expr_p (arg))
4606 save = 1;
4607 else if (lhs == 0 && rhs == 0
4608 && !TREE_CONSTANT (arg)
4609 && (*lang_hooks.decls.global_bindings_p) () == 0
4610 && ((TREE_CODE (arg) != VAR_DECL && TREE_CODE (arg) != PARM_DECL)
4611 || TREE_SIDE_EFFECTS (arg)))
4612 {
4613 if (TREE_CODE (true_value) != COND_EXPR)
4614 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4615
4616 if (TREE_CODE (false_value) != COND_EXPR)
4617 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4618
4619 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4620 && (rhs == 0 || !TREE_CONSTANT (rhs)))
4621 {
4622 arg = save_expr (arg);
4623 lhs = rhs = 0;
4624 save = 1;
4625 }
4626 }
4627
4628 if (lhs == 0)
4629 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4630 if (rhs == 0)
4631 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4632
4633 test = fold (build (COND_EXPR, type, test, lhs, rhs));
4634
4635 if (save)
4636 return build (COMPOUND_EXPR, type,
4637 convert (void_type_node, arg),
4638 strip_compound_expr (test, arg));
4639 else
4640 return convert (type, test);
4641 }
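
/* For example, `a + (b ? 2 : 3)' becomes `b ? (a + 2) : (a + 3)', and
   each arm may then fold further.  If A is nontrivial and neither arm
   folds to a constant, A is wrapped in a SAVE_EXPR first so that it is
   still evaluated only once.  */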
4642
4643 \f
4644 /* Subroutine of fold() that checks for the addition of +/- 0.0.
4645
4646 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
4647 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
4648 ADDEND is the same as X.
4649
4650 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
4651 and finite. The problematic cases are when X is zero, and its mode
4652 has signed zeros. In the case of rounding towards -infinity,
4653 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
4654 modes, X + 0 is not the same as X because -0 + 0 is 0. */
4655
4656 static bool
4657 fold_real_zero_addition_p (tree type, tree addend, int negate)
4658 {
4659 if (!real_zerop (addend))
4660 return false;
4661
4662 /* Don't allow the fold with -fsignaling-nans. */
4663 if (HONOR_SNANS (TYPE_MODE (type)))
4664 return false;
4665
4666 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
4667 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
4668 return true;
4669
4670 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
4671 if (TREE_CODE (addend) == REAL_CST
4672 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
4673 negate = !negate;
4674
4675 /* The mode has signed zeros, and we have to honor their sign.
4676 In this situation, there is only one case we can return true for.
4677 X - 0 is the same as X unless rounding towards -infinity is
4678 supported. */
4679 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
4680 }
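
/* Illustrative consequences of the rules above: `x + 0.0' may fold to
   `x' only when signed zeros can be ignored (for x == -0.0 the sum is
   +0.0 under the default rounding), whereas `x - 0.0' may fold to `x'
   even with signed zeros, provided rounding towards -infinity need not
   be honored.  */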
4681
4682 /* Subroutine of fold() that checks comparisons of built-in math
4683 functions against real constants.
4684
4685 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
4686 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
4687 is the type of the result and ARG0 and ARG1 are the operands of the
4688 comparison. ARG1 must be a TREE_REAL_CST.
4689
4690 The function returns the constant folded tree if a simplification
4691 can be made, and NULL_TREE otherwise. */
4692
4693 static tree
4694 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
4695 tree type, tree arg0, tree arg1)
4696 {
4697 REAL_VALUE_TYPE c;
4698
4699 if (fcode == BUILT_IN_SQRT
4700 || fcode == BUILT_IN_SQRTF
4701 || fcode == BUILT_IN_SQRTL)
4702 {
4703 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
4704 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
4705
4706 c = TREE_REAL_CST (arg1);
4707 if (REAL_VALUE_NEGATIVE (c))
4708 {
4709 /* sqrt(x) < y is always false, if y is negative. */
4710 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
4711 return omit_one_operand (type,
4712 convert (type, integer_zero_node),
4713 arg);
4714
4715 /* sqrt(x) > y is always true, if y is negative and we
4716 don't care about NaNs, i.e. negative values of x. */
4717 if (code == NE_EXPR || !HONOR_NANS (mode))
4718 return omit_one_operand (type,
4719 convert (type, integer_one_node),
4720 arg);
4721
4722 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
4723 return fold (build (GE_EXPR, type, arg,
4724 build_real (TREE_TYPE (arg), dconst0)));
4725 }
4726 else if (code == GT_EXPR || code == GE_EXPR)
4727 {
4728 REAL_VALUE_TYPE c2;
4729
4730 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
4731 real_convert (&c2, mode, &c2);
4732
4733 if (REAL_VALUE_ISINF (c2))
4734 {
4735 /* sqrt(x) > y is x == +Inf, when y is very large. */
4736 if (HONOR_INFINITIES (mode))
4737 return fold (build (EQ_EXPR, type, arg,
4738 build_real (TREE_TYPE (arg), c2)));
4739
4740 /* sqrt(x) > y is always false, when y is very large
4741 and we don't care about infinities. */
4742 return omit_one_operand (type,
4743 convert (type, integer_zero_node),
4744 arg);
4745 }
4746
4747 /* sqrt(x) > c is the same as x > c*c. */
4748 return fold (build (code, type, arg,
4749 build_real (TREE_TYPE (arg), c2)));
4750 }
4751 else if (code == LT_EXPR || code == LE_EXPR)
4752 {
4753 REAL_VALUE_TYPE c2;
4754
4755 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
4756 real_convert (&c2, mode, &c2);
4757
4758 if (REAL_VALUE_ISINF (c2))
4759 {
4760 /* sqrt(x) < y is always true, when y is a very large
4761 value and we don't care about NaNs or Infinities. */
4762 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
4763 return omit_one_operand (type,
4764 convert (type, integer_one_node),
4765 arg);
4766
4767 /* sqrt(x) < y is x != +Inf when y is very large and we
4768 don't care about NaNs. */
4769 if (! HONOR_NANS (mode))
4770 return fold (build (NE_EXPR, type, arg,
4771 build_real (TREE_TYPE (arg), c2)));
4772
4773 /* sqrt(x) < y is x >= 0 when y is very large and we
4774 don't care about Infinities. */
4775 if (! HONOR_INFINITIES (mode))
4776 return fold (build (GE_EXPR, type, arg,
4777 build_real (TREE_TYPE (arg), dconst0)));
4778
4779 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
4780 if ((*lang_hooks.decls.global_bindings_p) () != 0
4781 || CONTAINS_PLACEHOLDER_P (arg))
4782 return NULL_TREE;
4783
4784 arg = save_expr (arg);
4785 return fold (build (TRUTH_ANDIF_EXPR, type,
4786 fold (build (GE_EXPR, type, arg,
4787 build_real (TREE_TYPE (arg),
4788 dconst0))),
4789 fold (build (NE_EXPR, type, arg,
4790 build_real (TREE_TYPE (arg),
4791 c2)))));
4792 }
4793
4794 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
4795 if (! HONOR_NANS (mode))
4796 return fold (build (code, type, arg,
4797 build_real (TREE_TYPE (arg), c2)));
4798
4799 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
4800 if ((*lang_hooks.decls.global_bindings_p) () == 0
4801 && ! CONTAINS_PLACEHOLDER_P (arg))
4802 {
4803 arg = save_expr (arg);
4804 return fold (build (TRUTH_ANDIF_EXPR, type,
4805 fold (build (GE_EXPR, type, arg,
4806 build_real (TREE_TYPE (arg),
4807 dconst0))),
4808 fold (build (code, type, arg,
4809 build_real (TREE_TYPE (arg),
4810 c2)))));
4811 }
4812 }
4813 }
4814
4815 return NULL_TREE;
4816 }
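
/* For instance, the GT_EXPR case above folds `sqrt (x) > 2.0' into
   `x > 4.0', and for a negative bound `sqrt (x) > -1.0' becomes
   `x >= 0.0', which fails exactly where sqrt would have produced a
   NaN.  */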
4817
4818 /* Subroutine of fold() that optimizes comparisons against Infinities,
4819 either +Inf or -Inf.
4820
4821 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
4822 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
4823 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
4824
4825 The function returns the constant folded tree if a simplification
4826 can be made, and NULL_TREE otherwise. */
4827
4828 static tree
4829 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
4830 {
4831 enum machine_mode mode;
4832 REAL_VALUE_TYPE max;
4833 tree temp;
4834 bool neg;
4835
4836 mode = TYPE_MODE (TREE_TYPE (arg0));
4837
4838 /* For negative infinity swap the sense of the comparison. */
4839 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
4840 if (neg)
4841 code = swap_tree_comparison (code);
4842
4843 switch (code)
4844 {
4845 case GT_EXPR:
4846 /* x > +Inf is always false, if we ignore sNaNs. */
4847 if (HONOR_SNANS (mode))
4848 return NULL_TREE;
4849 return omit_one_operand (type,
4850 convert (type, integer_zero_node),
4851 arg0);
4852
4853 case LE_EXPR:
4854 /* x <= +Inf is always true, if we don't care about NaNs. */
4855 if (! HONOR_NANS (mode))
4856 return omit_one_operand (type,
4857 convert (type, integer_one_node),
4858 arg0);
4859
4860 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
4861 if ((*lang_hooks.decls.global_bindings_p) () == 0
4862 && ! CONTAINS_PLACEHOLDER_P (arg0))
4863 {
4864 arg0 = save_expr (arg0);
4865 return fold (build (EQ_EXPR, type, arg0, arg0));
4866 }
4867 break;
4868
4869 case EQ_EXPR:
4870 case GE_EXPR:
4871 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
4872 real_maxval (&max, neg, mode);
4873 return fold (build (neg ? LT_EXPR : GT_EXPR, type,
4874 arg0, build_real (TREE_TYPE (arg0), max)));
4875
4876 case LT_EXPR:
4877 /* x < +Inf is always equal to x <= DBL_MAX. */
4878 real_maxval (&max, neg, mode);
4879 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
4880 arg0, build_real (TREE_TYPE (arg0), max)));
4881
4882 case NE_EXPR:
4883 /* x != +Inf is always equal to !(x > DBL_MAX). */
4884 real_maxval (&max, neg, mode);
4885 if (! HONOR_NANS (mode))
4886 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
4887 arg0, build_real (TREE_TYPE (arg0), max)));
4888 temp = fold (build (neg ? LT_EXPR : GT_EXPR, type,
4889 arg0, build_real (TREE_TYPE (arg0), max)));
4890 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
4891
4892 default:
4893 break;
4894 }
4895
4896 return NULL_TREE;
4897 }
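
/* For example, with doubles `x < +Inf' folds to `x <= DBL_MAX' and
   `x >= +Inf' to `x > DBL_MAX'; comparisons against -Inf are handled
   by first swapping the sense of the comparison.  */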
4898
4899 /* If CODE with arguments ARG0 and ARG1 represents a single bit
4900 equality/inequality test, then return a simplified form of
4901 the test using shifts and logical operations. Otherwise return
4902 NULL. TYPE is the desired result type. */
4903
4904 tree
4905 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
4906 tree result_type)
4907 {
4908 /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
4909 operand 0. */
4910 if (code == TRUTH_NOT_EXPR)
4911 {
4912 code = TREE_CODE (arg0);
4913 if (code != NE_EXPR && code != EQ_EXPR)
4914 return NULL_TREE;
4915
4916 /* Extract the arguments of the EQ/NE. */
4917 arg1 = TREE_OPERAND (arg0, 1);
4918 arg0 = TREE_OPERAND (arg0, 0);
4919
4920 /* This requires us to invert the code. */
4921 code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
4922 }
4923
4924 /* If this is testing a single bit, we can optimize the test. */
4925 if ((code == NE_EXPR || code == EQ_EXPR)
4926 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
4927 && integer_pow2p (TREE_OPERAND (arg0, 1)))
4928 {
4929 tree inner = TREE_OPERAND (arg0, 0);
4930 tree type = TREE_TYPE (arg0);
4931 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
4932 enum machine_mode operand_mode = TYPE_MODE (type);
4933 int ops_unsigned;
4934 tree signed_type, unsigned_type;
4935 tree arg00;
4936
4937 /* If we have (A & C) != 0 where C is the sign bit of A, convert
4938 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
4939 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
4940 if (arg00 != NULL_TREE)
4941 {
4942 tree stype = (*lang_hooks.types.signed_type) (TREE_TYPE (arg00));
4943 return fold (build (code == EQ_EXPR ? GE_EXPR : LT_EXPR, result_type,
4944 convert (stype, arg00),
4945 convert (stype, integer_zero_node)));
4946 }
4947
4948 /* At this point, we know that arg0 is not testing the sign bit. */
4949 if (TYPE_PRECISION (type) - 1 == bitnum)
4950 abort ();
4951
4952 /* Otherwise we have (A & C) != 0 where C is a single bit,
4953 convert that into ((A >> C2) & 1). Where C2 = log2(C).
4954 Similarly for (A & C) == 0. */
4955
4956 /* If INNER is a right shift of a constant and it plus BITNUM does
4957 not overflow, adjust BITNUM and INNER. */
4958 if (TREE_CODE (inner) == RSHIFT_EXPR
4959 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
4960 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
4961 && bitnum < TYPE_PRECISION (type)
4962 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
4963 bitnum - TYPE_PRECISION (type)))
4964 {
4965 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
4966 inner = TREE_OPERAND (inner, 0);
4967 }
4968
4969 /* If we are going to be able to omit the AND below, we must do our
4970 operations as unsigned. If we must use the AND, we have a choice.
4971 Normally unsigned is faster, but for some machines signed is. */
4972 #ifdef LOAD_EXTEND_OP
4973 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
4974 #else
4975 ops_unsigned = 1;
4976 #endif
4977
4978 signed_type = (*lang_hooks.types.type_for_mode) (operand_mode, 0);
4979 unsigned_type = (*lang_hooks.types.type_for_mode) (operand_mode, 1);
4980
4981 if (bitnum != 0)
4982 inner = build (RSHIFT_EXPR, ops_unsigned ? unsigned_type : signed_type,
4983 inner, size_int (bitnum));
4984
4985 if (code == EQ_EXPR)
4986 inner = build (BIT_XOR_EXPR, ops_unsigned ? unsigned_type : signed_type,
4987 inner, integer_one_node);
4988
4989 /* Put the AND last so it can combine with more things. */
4990 inner = build (BIT_AND_EXPR, ops_unsigned ? unsigned_type : signed_type,
4991 inner, integer_one_node);
4992
4993 /* Make sure to return the proper type. */
4994 if (TREE_TYPE (inner) != result_type)
4995 inner = convert (result_type, inner);
4996
4997 return inner;
4998 }
4999 return NULL_TREE;
5000 }
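
/* Illustrative folds performed above (types elided):

     (x & 8) != 0   becomes   (x >> 3) & 1
     (x & 8) == 0   becomes   ((x >> 3) ^ 1) & 1

   and when the mask is the sign bit, e.g. (x & 0x80000000) != 0 for a
   32-bit int, the test simply becomes x < 0.  */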
5001
5002 /* Test whether it is preferable to swap two operands, ARG0 and
5003 ARG1, for example because ARG0 is an integer constant and ARG1
5004 isn't. */
5005
5006 static bool
5007 tree_swap_operands_p (tree arg0, tree arg1)
5008 {
5009 STRIP_SIGN_NOPS (arg0);
5010 STRIP_SIGN_NOPS (arg1);
5011
5012 if (TREE_CODE (arg1) == INTEGER_CST)
5013 return 0;
5014 if (TREE_CODE (arg0) == INTEGER_CST)
5015 return 1;
5016
5017 if (TREE_CODE (arg1) == REAL_CST)
5018 return 0;
5019 if (TREE_CODE (arg0) == REAL_CST)
5020 return 1;
5021
5022 if (TREE_CODE (arg1) == COMPLEX_CST)
5023 return 0;
5024 if (TREE_CODE (arg0) == COMPLEX_CST)
5025 return 1;
5026
5027 if (TREE_CONSTANT (arg1))
5028 return 0;
5029 if (TREE_CONSTANT (arg0))
5030 return 1;
5031
5032 return 0;
5033 }
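
/* E.g. this tells fold to rewrite `2 + x' as `x + 2' for the
   commutative codes, so the simplifications below need only look for
   a constant in the second operand.  */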
5034
5035 /* Perform constant folding and related simplification of EXPR.
5036 The related simplifications include x*1 => x, x*0 => 0, etc.,
5037 and application of the associative law.
5038 NOP_EXPR conversions may be removed freely (as long as we
5039 are careful not to change the C type of the overall expression)
5040 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
5041 but we can constant-fold them if they have constant operands. */
5042
5043 #ifdef ENABLE_FOLD_CHECKING
5044 # define fold(x) fold_1 (x)
5045 static tree fold_1 (tree);
5046 static
5047 #endif
5048 tree
5049 fold (tree expr)
5050 {
5051 tree t = expr, orig_t;
5052 tree t1 = NULL_TREE;
5053 tree tem;
5054 tree type = TREE_TYPE (expr);
5055 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
5056 enum tree_code code = TREE_CODE (t);
5057 int kind = TREE_CODE_CLASS (code);
5058 int invert;
5059 /* WINS will be nonzero when the switch is done
5060 if all operands are constant. */
5061 int wins = 1;
5062
5063 /* Don't try to process an RTL_EXPR since its operands aren't trees.
5064 Likewise for a SAVE_EXPR that's already been evaluated. */
5065 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
5066 return t;
5067
5068 /* Return right away if a constant. */
5069 if (kind == 'c')
5070 return t;
5071
5072 #ifdef MAX_INTEGER_COMPUTATION_MODE
5073 check_max_integer_computation_mode (expr);
5074 #endif
5075 orig_t = t;
5076
5077 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
5078 {
5079 tree subop;
5080
5081 /* Special case for conversion ops that can have fixed point args. */
5082 arg0 = TREE_OPERAND (t, 0);
5083
5084 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
5085 if (arg0 != 0)
5086 STRIP_SIGN_NOPS (arg0);
5087
5088 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
5089 subop = TREE_REALPART (arg0);
5090 else
5091 subop = arg0;
5092
5093 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
5094 && TREE_CODE (subop) != REAL_CST)
5095 /* Note that TREE_CONSTANT isn't enough:
5096 static var addresses are constant but we can't
5097 do arithmetic on them. */
5098 wins = 0;
5099 }
5100 else if (IS_EXPR_CODE_CLASS (kind))
5101 {
5102 int len = first_rtl_op (code);
5103 int i;
5104 for (i = 0; i < len; i++)
5105 {
5106 tree op = TREE_OPERAND (t, i);
5107 tree subop;
5108
5109 if (op == 0)
5110 continue; /* Valid for CALL_EXPR, at least. */
5111
5112 if (kind == '<' || code == RSHIFT_EXPR)
5113 {
5114 /* Signedness matters here. Perhaps we can refine this
5115 later. */
5116 STRIP_SIGN_NOPS (op);
5117 }
5118 else
5119 /* Strip any conversions that don't change the mode. */
5120 STRIP_NOPS (op);
5121
5122 if (TREE_CODE (op) == COMPLEX_CST)
5123 subop = TREE_REALPART (op);
5124 else
5125 subop = op;
5126
5127 if (TREE_CODE (subop) != INTEGER_CST
5128 && TREE_CODE (subop) != REAL_CST)
5129 /* Note that TREE_CONSTANT isn't enough:
5130 static var addresses are constant but we can't
5131 do arithmetic on them. */
5132 wins = 0;
5133
5134 if (i == 0)
5135 arg0 = op;
5136 else if (i == 1)
5137 arg1 = op;
5138 }
5139 }
5140
5141 /* If this is a commutative operation, and ARG0 is a constant, move it
5142 to ARG1 to reduce the number of tests below. */
5143 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
5144 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
5145 || code == BIT_AND_EXPR)
5146 && tree_swap_operands_p (arg0, arg1))
5147 return fold (build (code, type, arg1, arg0));
5148
5149 /* Now WINS is set as described above,
5150 ARG0 is the first operand of EXPR,
5151 and ARG1 is the second operand (if it has more than one operand).
5152
5153 First check for cases where an arithmetic operation is applied to a
5154 compound, conditional, or comparison operation. Push the arithmetic
5155 operation inside the compound or conditional to see if any folding
5156 can then be done. Convert comparison to conditional for this purpose.
5157 The also optimizes non-constant cases that used to be done in
5158 expand_expr.
5159
5160 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
5161 one of the operands is a comparison and the other is a comparison, a
5162 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
5163 code below would make the expression more complex. Change it to a
5164 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
5165 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
5166
5167 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
5168 || code == EQ_EXPR || code == NE_EXPR)
5169 && ((truth_value_p (TREE_CODE (arg0))
5170 && (truth_value_p (TREE_CODE (arg1))
5171 || (TREE_CODE (arg1) == BIT_AND_EXPR
5172 && integer_onep (TREE_OPERAND (arg1, 1)))))
5173 || (truth_value_p (TREE_CODE (arg1))
5174 && (truth_value_p (TREE_CODE (arg0))
5175 || (TREE_CODE (arg0) == BIT_AND_EXPR
5176 && integer_onep (TREE_OPERAND (arg0, 1)))))))
5177 {
5178 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
5179 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
5180 : TRUTH_XOR_EXPR,
5181 type, arg0, arg1));
5182
5183 if (code == EQ_EXPR)
5184 t = invert_truthvalue (t);
5185
5186 return t;
5187 }
5188
5189 if (TREE_CODE_CLASS (code) == '1')
5190 {
5191 if (TREE_CODE (arg0) == COMPOUND_EXPR)
5192 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5193 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
5194 else if (TREE_CODE (arg0) == COND_EXPR)
5195 {
5196 tree arg01 = TREE_OPERAND (arg0, 1);
5197 tree arg02 = TREE_OPERAND (arg0, 2);
5198 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
5199 arg01 = fold (build1 (code, type, arg01));
5200 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
5201 arg02 = fold (build1 (code, type, arg02));
5202 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
5203 arg01, arg02));
5204
5205 /* If this was a conversion, and all we did was to move into
5206 inside the COND_EXPR, bring it back out. But leave it if
5207 it is a conversion from integer to integer and the
5208 result precision is no wider than a word since such a
5209 conversion is cheap and may be optimized away by combine,
5210 while it couldn't if it were outside the COND_EXPR. Then return
5211 so we don't get into an infinite recursion loop taking the
5212 conversion out and then back in. */
5213
5214 if ((code == NOP_EXPR || code == CONVERT_EXPR
5215 || code == NON_LVALUE_EXPR)
5216 && TREE_CODE (t) == COND_EXPR
5217 && TREE_CODE (TREE_OPERAND (t, 1)) == code
5218 && TREE_CODE (TREE_OPERAND (t, 2)) == code
5219 && ! VOID_TYPE_P (TREE_OPERAND (t, 1))
5220 && ! VOID_TYPE_P (TREE_OPERAND (t, 2))
5221 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
5222 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
5223 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
5224 && (INTEGRAL_TYPE_P
5225 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))))
5226 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
5227 t = build1 (code, type,
5228 build (COND_EXPR,
5229 TREE_TYPE (TREE_OPERAND
5230 (TREE_OPERAND (t, 1), 0)),
5231 TREE_OPERAND (t, 0),
5232 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
5233 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
5234 return t;
5235 }
5236 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
5237 return fold (build (COND_EXPR, type, arg0,
5238 fold (build1 (code, type, integer_one_node)),
5239 fold (build1 (code, type, integer_zero_node))));
5240 }
5241 else if (TREE_CODE_CLASS (code) == '<'
5242 && TREE_CODE (arg0) == COMPOUND_EXPR)
5243 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5244 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5245 else if (TREE_CODE_CLASS (code) == '<'
5246 && TREE_CODE (arg1) == COMPOUND_EXPR)
5247 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5248 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
5249 else if (TREE_CODE_CLASS (code) == '2'
5250 || TREE_CODE_CLASS (code) == '<')
5251 {
5252 if (TREE_CODE (arg1) == COMPOUND_EXPR
5253 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg1, 0))
5254 && ! TREE_SIDE_EFFECTS (arg0))
5255 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5256 fold (build (code, type,
5257 arg0, TREE_OPERAND (arg1, 1))));
5258 else if ((TREE_CODE (arg1) == COND_EXPR
5259 || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
5260 && TREE_CODE_CLASS (code) != '<'))
5261 && (TREE_CODE (arg0) != COND_EXPR
5262 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5263 && (! TREE_SIDE_EFFECTS (arg0)
5264 || ((*lang_hooks.decls.global_bindings_p) () == 0
5265 && ! CONTAINS_PLACEHOLDER_P (arg0))))
5266 return
5267 fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
5268 /*cond_first_p=*/0);
5269 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
5270 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5271 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5272 else if ((TREE_CODE (arg0) == COND_EXPR
5273 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
5274 && TREE_CODE_CLASS (code) != '<'))
5275 && (TREE_CODE (arg1) != COND_EXPR
5276 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5277 && (! TREE_SIDE_EFFECTS (arg1)
5278 || ((*lang_hooks.decls.global_bindings_p) () == 0
5279 && ! CONTAINS_PLACEHOLDER_P (arg1))))
5280 return
5281 fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
5282 /*cond_first_p=*/1);
5283 }
5284
5285 switch (code)
5286 {
5287 case INTEGER_CST:
5288 case REAL_CST:
5289 case VECTOR_CST:
5290 case STRING_CST:
5291 case COMPLEX_CST:
5292 case CONSTRUCTOR:
5293 return t;
5294
5295 case CONST_DECL:
5296 return fold (DECL_INITIAL (t));
5297
5298 case NOP_EXPR:
5299 case FLOAT_EXPR:
5300 case CONVERT_EXPR:
5301 case FIX_TRUNC_EXPR:
5302 /* Other kinds of FIX are not handled properly by fold_convert. */
5303
5304 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
5305 return TREE_OPERAND (t, 0);
5306
5307 /* Handle cases of two conversions in a row. */
5308 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
5309 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
5310 {
5311 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5312 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
5313 tree final_type = TREE_TYPE (t);
5314 int inside_int = INTEGRAL_TYPE_P (inside_type);
5315 int inside_ptr = POINTER_TYPE_P (inside_type);
5316 int inside_float = FLOAT_TYPE_P (inside_type);
5317 unsigned int inside_prec = TYPE_PRECISION (inside_type);
5318 int inside_unsignedp = TREE_UNSIGNED (inside_type);
5319 int inter_int = INTEGRAL_TYPE_P (inter_type);
5320 int inter_ptr = POINTER_TYPE_P (inter_type);
5321 int inter_float = FLOAT_TYPE_P (inter_type);
5322 unsigned int inter_prec = TYPE_PRECISION (inter_type);
5323 int inter_unsignedp = TREE_UNSIGNED (inter_type);
5324 int final_int = INTEGRAL_TYPE_P (final_type);
5325 int final_ptr = POINTER_TYPE_P (final_type);
5326 int final_float = FLOAT_TYPE_P (final_type);
5327 unsigned int final_prec = TYPE_PRECISION (final_type);
5328 int final_unsignedp = TREE_UNSIGNED (final_type);
5329
5330 /* In addition to the cases of two conversions in a row
5331 handled below, if we are converting something to its own
5332 type via an object of identical or wider precision, neither
5333 conversion is needed. */
5334 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (final_type)
5335 && ((inter_int && final_int) || (inter_float && final_float))
5336 && inter_prec >= final_prec)
5337 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5338
5339 /* Likewise, if the intermediate and final types are either both
5340 float or both integer, we don't need the middle conversion if
5341 it is wider than the final type and doesn't change the signedness
5342 (for integers). Avoid this if the final type is a pointer
5343 since then we sometimes need the inner conversion. Likewise if
5344 the outer has a precision not equal to the size of its mode. */
5345 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
5346 || (inter_float && inside_float))
5347 && inter_prec >= inside_prec
5348 && (inter_float || inter_unsignedp == inside_unsignedp)
5349 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5350 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5351 && ! final_ptr)
5352 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5353
5354 /* If we have a sign-extension of a zero-extended value, we can
5355 replace that by a single zero-extension. */
5356 if (inside_int && inter_int && final_int
5357 && inside_prec < inter_prec && inter_prec < final_prec
5358 && inside_unsignedp && !inter_unsignedp)
5359 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5360
5361 /* Two conversions in a row are not needed unless:
5362 - some conversion is floating-point (overstrict for now), or
5363 - the intermediate type is narrower than both initial and
5364 final, or
5365 - the intermediate type and innermost type differ in signedness,
5366 and the outermost type is wider than the intermediate, or
5367 - the initial type is a pointer type and the precisions of the
5368 intermediate and final types differ, or
5369 - the final type is a pointer type and the precisions of the
5370 initial and intermediate types differ. */
5371 if (! inside_float && ! inter_float && ! final_float
5372 && (inter_prec > inside_prec || inter_prec > final_prec)
5373 && ! (inside_int && inter_int
5374 && inter_unsignedp != inside_unsignedp
5375 && inter_prec < final_prec)
5376 && ((inter_unsignedp && inter_prec > inside_prec)
5377 == (final_unsignedp && final_prec > inter_prec))
5378 && ! (inside_ptr && inter_prec != final_prec)
5379 && ! (final_ptr && inside_prec != inter_prec)
5380 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5381 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5382 && ! final_ptr)
5383 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5384 }
5385
5386 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
5387 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
5388 /* Detect assigning a bitfield. */
5389 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
5390 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
5391 {
5392 /* Don't leave an assignment inside a conversion
5393 unless assigning a bitfield. */
5394 tree prev = TREE_OPERAND (t, 0);
5395 if (t == orig_t)
5396 t = copy_node (t);
5397 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
5398 /* First do the assignment, then return converted constant. */
5399 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
5400 TREE_USED (t) = 1;
5401 return t;
5402 }
5403
5404 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
5405 constants (if x has signed type, the sign bit cannot be set
5406 in c). This folds extension into the BIT_AND_EXPR. */
5407 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
5408 && TREE_CODE (TREE_TYPE (t)) != BOOLEAN_TYPE
5409 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
5410 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
5411 {
5412 tree and = TREE_OPERAND (t, 0);
5413 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
5414 int change = 0;
5415
5416 if (TREE_UNSIGNED (TREE_TYPE (and))
5417 || (TYPE_PRECISION (TREE_TYPE (t))
5418 <= TYPE_PRECISION (TREE_TYPE (and))))
5419 change = 1;
5420 else if (TYPE_PRECISION (TREE_TYPE (and1))
5421 <= HOST_BITS_PER_WIDE_INT
5422 && host_integerp (and1, 1))
5423 {
5424 unsigned HOST_WIDE_INT cst;
5425
5426 cst = tree_low_cst (and1, 1);
5427 cst &= (HOST_WIDE_INT) -1
5428 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
5429 change = (cst == 0);
5430 #ifdef LOAD_EXTEND_OP
5431 if (change
5432 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
5433 == ZERO_EXTEND))
5434 {
5435 tree uns = (*lang_hooks.types.unsigned_type) (TREE_TYPE (and0));
5436 and0 = convert (uns, and0);
5437 and1 = convert (uns, and1);
5438 }
5439 #endif
5440 }
5441 if (change)
5442 return fold (build (BIT_AND_EXPR, TREE_TYPE (t),
5443 convert (TREE_TYPE (t), and0),
5444 convert (TREE_TYPE (t), and1)));
5445 }
5446
5447 if (!wins)
5448 {
5449 if (TREE_CONSTANT (t) != TREE_CONSTANT (arg0))
5450 {
5451 if (t == orig_t)
5452 t = copy_node (t);
5453 TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
5454 }
5455 return t;
5456 }
5457 return fold_convert (t, arg0);
5458
5459 case VIEW_CONVERT_EXPR:
5460 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
5461 return build1 (VIEW_CONVERT_EXPR, type,
5462 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5463 return t;
5464
5465 case COMPONENT_REF:
5466 if (TREE_CODE (arg0) == CONSTRUCTOR
5467 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
5468 {
5469 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
5470 if (m)
5471 t = TREE_VALUE (m);
5472 }
5473 return t;
5474
5475 case RANGE_EXPR:
5476 if (TREE_CONSTANT (t) != wins)
5477 {
5478 if (t == orig_t)
5479 t = copy_node (t);
5480 TREE_CONSTANT (t) = wins;
5481 }
5482 return t;
5483
5484 case NEGATE_EXPR:
5485 if (wins)
5486 {
5487 if (TREE_CODE (arg0) == INTEGER_CST)
5488 {
5489 unsigned HOST_WIDE_INT low;
5490 HOST_WIDE_INT high;
5491 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5492 TREE_INT_CST_HIGH (arg0),
5493 &low, &high);
5494 t = build_int_2 (low, high);
5495 TREE_TYPE (t) = type;
5496 TREE_OVERFLOW (t)
5497 = (TREE_OVERFLOW (arg0)
5498 | force_fit_type (t, overflow && !TREE_UNSIGNED (type)));
5499 TREE_CONSTANT_OVERFLOW (t)
5500 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5501 }
5502 else if (TREE_CODE (arg0) == REAL_CST)
5503 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5504 }
5505 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5506 return TREE_OPERAND (arg0, 0);
5507 /* Convert -((double)float) into (double)(-float). */
5508 else if (TREE_CODE (arg0) == NOP_EXPR
5509 && TREE_CODE (type) == REAL_TYPE)
5510 {
5511 tree targ0 = strip_float_extensions (arg0);
5512 if (targ0 != arg0)
5513 return convert (type, build1 (NEGATE_EXPR, TREE_TYPE (targ0), targ0));
5514
5515 }
5516
5517 /* Convert - (a - b) to (b - a) for non-floating-point. */
5518 else if (TREE_CODE (arg0) == MINUS_EXPR
5519 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
5520 return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
5521 TREE_OPERAND (arg0, 0));
5522
5523 /* Convert -f(x) into f(-x) where f is sin, tan or atan. */
5524 switch (builtin_mathfn_code (arg0))
5525 {
5526 case BUILT_IN_SIN:
5527 case BUILT_IN_SINF:
5528 case BUILT_IN_SINL:
5529 case BUILT_IN_TAN:
5530 case BUILT_IN_TANF:
5531 case BUILT_IN_TANL:
5532 case BUILT_IN_ATAN:
5533 case BUILT_IN_ATANF:
5534 case BUILT_IN_ATANL:
5535 if (negate_expr_p (TREE_VALUE (TREE_OPERAND (arg0, 1))))
5536 {
5537 tree fndecl, arg, arglist;
5538
5539 fndecl = get_callee_fndecl (arg0);
5540 arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5541 arg = fold (build1 (NEGATE_EXPR, type, arg));
5542 arglist = build_tree_list (NULL_TREE, arg);
5543 return build_function_call_expr (fndecl, arglist);
5544 }
5545 break;
5546
5547 default:
5548 break;
5549 }
5550 return t;
5551
5552 case ABS_EXPR:
5553 if (wins)
5554 {
5555 if (TREE_CODE (arg0) == INTEGER_CST)
5556 {
5557 /* If the value is unsigned, then the absolute value is
5558 the same as the ordinary value. */
5559 if (TREE_UNSIGNED (type))
5560 return arg0;
5561 /* Similarly, if the value is non-negative. */
5562 else if (INT_CST_LT (integer_minus_one_node, arg0))
5563 return arg0;
5564 /* If the value is negative, then the absolute value is
5565 its negation. */
5566 else
5567 {
5568 unsigned HOST_WIDE_INT low;
5569 HOST_WIDE_INT high;
5570 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5571 TREE_INT_CST_HIGH (arg0),
5572 &low, &high);
5573 t = build_int_2 (low, high);
5574 TREE_TYPE (t) = type;
5575 TREE_OVERFLOW (t)
5576 = (TREE_OVERFLOW (arg0)
5577 | force_fit_type (t, overflow));
5578 TREE_CONSTANT_OVERFLOW (t)
5579 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5580 }
5581 }
5582 else if (TREE_CODE (arg0) == REAL_CST)
5583 {
5584 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
5585 t = build_real (type,
5586 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5587 }
5588 }
5589 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5590 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
5591 /* Convert fabs((double)float) into (double)fabsf(float). */
5592 else if (TREE_CODE (arg0) == NOP_EXPR
5593 && TREE_CODE (type) == REAL_TYPE)
5594 {
5595 tree targ0 = strip_float_extensions (arg0);
5596 if (targ0 != arg0)
5597 return convert (type, fold (build1 (ABS_EXPR, TREE_TYPE (targ0),
5598 targ0)));
5599 }
5600 else if (tree_expr_nonnegative_p (arg0))
5601 return arg0;
5602 return t;
5603
5604 case CONJ_EXPR:
5605 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5606 return convert (type, arg0);
5607 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5608 return build (COMPLEX_EXPR, type,
5609 TREE_OPERAND (arg0, 0),
5610 negate_expr (TREE_OPERAND (arg0, 1)));
5611 else if (TREE_CODE (arg0) == COMPLEX_CST)
5612 return build_complex (type, TREE_REALPART (arg0),
5613 negate_expr (TREE_IMAGPART (arg0)));
5614 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5615 return fold (build (TREE_CODE (arg0), type,
5616 fold (build1 (CONJ_EXPR, type,
5617 TREE_OPERAND (arg0, 0))),
5618 fold (build1 (CONJ_EXPR,
5619 type, TREE_OPERAND (arg0, 1)))));
5620 else if (TREE_CODE (arg0) == CONJ_EXPR)
5621 return TREE_OPERAND (arg0, 0);
5622 return t;
5623
5624 case BIT_NOT_EXPR:
5625 if (wins)
5626 {
5627 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
5628 ~ TREE_INT_CST_HIGH (arg0));
5629 TREE_TYPE (t) = type;
5630 force_fit_type (t, 0);
5631 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
5632 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
5633 }
5634 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
5635 return TREE_OPERAND (arg0, 0);
5636 return t;
5637
5638 case PLUS_EXPR:
5639 /* A + (-B) -> A - B */
5640 if (TREE_CODE (arg1) == NEGATE_EXPR)
5641 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5642 /* (-A) + B -> B - A */
5643 if (TREE_CODE (arg0) == NEGATE_EXPR)
5644 return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
5645 else if (! FLOAT_TYPE_P (type))
5646 {
5647 if (integer_zerop (arg1))
5648 return non_lvalue (convert (type, arg0));
5649
5650 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
5651 with a constant, and the two constants have no bits in common,
5652 we should treat this as a BIT_IOR_EXPR since this may produce more
5653 simplifications. */
5654 if (TREE_CODE (arg0) == BIT_AND_EXPR
5655 && TREE_CODE (arg1) == BIT_AND_EXPR
5656 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5657 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5658 && integer_zerop (const_binop (BIT_AND_EXPR,
5659 TREE_OPERAND (arg0, 1),
5660 TREE_OPERAND (arg1, 1), 0)))
5661 {
5662 code = BIT_IOR_EXPR;
5663 goto bit_ior;
5664 }
5665
5666 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
5667 (plus (plus (mult) (mult)) (foo)) so that we can
5668 take advantage of the factoring cases below. */
5669 if ((TREE_CODE (arg0) == PLUS_EXPR
5670 && TREE_CODE (arg1) == MULT_EXPR)
5671 || (TREE_CODE (arg1) == PLUS_EXPR
5672 && TREE_CODE (arg0) == MULT_EXPR))
5673 {
5674 tree parg0, parg1, parg, marg;
5675
5676 if (TREE_CODE (arg0) == PLUS_EXPR)
5677 parg = arg0, marg = arg1;
5678 else
5679 parg = arg1, marg = arg0;
5680 parg0 = TREE_OPERAND (parg, 0);
5681 parg1 = TREE_OPERAND (parg, 1);
5682 STRIP_NOPS (parg0);
5683 STRIP_NOPS (parg1);
5684
5685 if (TREE_CODE (parg0) == MULT_EXPR
5686 && TREE_CODE (parg1) != MULT_EXPR)
5687 return fold (build (PLUS_EXPR, type,
5688 fold (build (PLUS_EXPR, type,
5689 convert (type, parg0),
5690 convert (type, marg))),
5691 convert (type, parg1)));
5692 if (TREE_CODE (parg0) != MULT_EXPR
5693 && TREE_CODE (parg1) == MULT_EXPR)
5694 return fold (build (PLUS_EXPR, type,
5695 fold (build (PLUS_EXPR, type,
5696 convert (type, parg1),
5697 convert (type, marg))),
5698 convert (type, parg0)));
5699 }
5700
5701 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
5702 {
5703 tree arg00, arg01, arg10, arg11;
5704 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
5705
5706 /* (A * C) + (B * C) -> (A+B) * C.
5707 We are most concerned about the case where C is a constant,
5708 but other combinations show up during loop reduction. Since
5709 it is not difficult, try all four possibilities. */
5710
5711 arg00 = TREE_OPERAND (arg0, 0);
5712 arg01 = TREE_OPERAND (arg0, 1);
5713 arg10 = TREE_OPERAND (arg1, 0);
5714 arg11 = TREE_OPERAND (arg1, 1);
5715 same = NULL_TREE;
5716
5717 if (operand_equal_p (arg01, arg11, 0))
5718 same = arg01, alt0 = arg00, alt1 = arg10;
5719 else if (operand_equal_p (arg00, arg10, 0))
5720 same = arg00, alt0 = arg01, alt1 = arg11;
5721 else if (operand_equal_p (arg00, arg11, 0))
5722 same = arg00, alt0 = arg01, alt1 = arg10;
5723 else if (operand_equal_p (arg01, arg10, 0))
5724 same = arg01, alt0 = arg00, alt1 = arg11;
5725
5726 /* No identical multiplicands; see if we can find a common
5727 power-of-two factor in non-power-of-two multiplies. This
5728 can help in multi-dimensional array access. */
5729 else if (TREE_CODE (arg01) == INTEGER_CST
5730 && TREE_CODE (arg11) == INTEGER_CST
5731 && TREE_INT_CST_HIGH (arg01) == 0
5732 && TREE_INT_CST_HIGH (arg11) == 0)
5733 {
5734 HOST_WIDE_INT int01, int11, tmp;
5735 int01 = TREE_INT_CST_LOW (arg01);
5736 int11 = TREE_INT_CST_LOW (arg11);
5737
5738 /* Move min of absolute values to int11. */
5739 if ((int01 >= 0 ? int01 : -int01)
5740 < (int11 >= 0 ? int11 : -int11))
5741 {
5742 tmp = int01, int01 = int11, int11 = tmp;
5743 alt0 = arg00, arg00 = arg10, arg10 = alt0;
5744 alt0 = arg01, arg01 = arg11, arg11 = alt0;
5745 }
5746
5747 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
5748 {
5749 alt0 = fold (build (MULT_EXPR, type, arg00,
5750 build_int_2 (int01 / int11, 0)));
5751 alt1 = arg10;
5752 same = arg11;
5753 }
5754 }
5755
5756 if (same)
5757 return fold (build (MULT_EXPR, type,
5758 fold (build (PLUS_EXPR, type, alt0, alt1)),
5759 same));
5760 }
5761 }
5762 else
5763 {
5764 /* See if ARG1 is zero and X + ARG1 reduces to X. */
5765 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
5766 return non_lvalue (convert (type, arg0));
5767
5768 /* Likewise if the operands are reversed. */
5769 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
5770 return non_lvalue (convert (type, arg1));
5771
5772 /* Convert x+x into x*2.0. */
5773 if (operand_equal_p (arg0, arg1, 0)
5774 && SCALAR_FLOAT_TYPE_P (type))
5775 return fold (build (MULT_EXPR, type, arg0,
5776 build_real (type, dconst2)));
5777
5778 /* Convert x*c+x into x*(c+1). */
5779 if (flag_unsafe_math_optimizations
5780 && TREE_CODE (arg0) == MULT_EXPR
5781 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
5782 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
5783 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
5784 {
5785 REAL_VALUE_TYPE c;
5786
5787 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
5788 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
5789 return fold (build (MULT_EXPR, type, arg1,
5790 build_real (type, c)));
5791 }
5792
5793 /* Convert x+x*c into x*(c+1). */
5794 if (flag_unsafe_math_optimizations
5795 && TREE_CODE (arg1) == MULT_EXPR
5796 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
5797 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
5798 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
5799 {
5800 REAL_VALUE_TYPE c;
5801
5802 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
5803 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
5804 return fold (build (MULT_EXPR, type, arg0,
5805 build_real (type, c)));
5806 }
5807
5808 /* Convert x*c1+x*c2 into x*(c1+c2). */
5809 if (flag_unsafe_math_optimizations
5810 && TREE_CODE (arg0) == MULT_EXPR
5811 && TREE_CODE (arg1) == MULT_EXPR
5812 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
5813 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
5814 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
5815 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
5816 && operand_equal_p (TREE_OPERAND (arg0, 0),
5817 TREE_OPERAND (arg1, 0), 0))
5818 {
5819 REAL_VALUE_TYPE c1, c2;
5820
5821 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
5822 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
5823 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
5824 return fold (build (MULT_EXPR, type,
5825 TREE_OPERAND (arg0, 0),
5826 build_real (type, c1)));
5827 }
5828 }
5829
5830 bit_rotate:
5831 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
5832 is a rotate of A by C1 bits. */
5833 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
5834 is a rotate of A by B bits. */
5835 {
5836 enum tree_code code0, code1;
5837 code0 = TREE_CODE (arg0);
5838 code1 = TREE_CODE (arg1);
5839 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
5840 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
5841 && operand_equal_p (TREE_OPERAND (arg0, 0),
5842 TREE_OPERAND (arg1, 0), 0)
5843 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5844 {
5845 tree tree01, tree11;
5846 enum tree_code code01, code11;
5847
5848 tree01 = TREE_OPERAND (arg0, 1);
5849 tree11 = TREE_OPERAND (arg1, 1);
5850 STRIP_NOPS (tree01);
5851 STRIP_NOPS (tree11);
5852 code01 = TREE_CODE (tree01);
5853 code11 = TREE_CODE (tree11);
5854 if (code01 == INTEGER_CST
5855 && code11 == INTEGER_CST
5856 && TREE_INT_CST_HIGH (tree01) == 0
5857 && TREE_INT_CST_HIGH (tree11) == 0
5858 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
5859 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
5860 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
5861 code0 == LSHIFT_EXPR ? tree01 : tree11);
5862 else if (code11 == MINUS_EXPR)
5863 {
5864 tree tree110, tree111;
5865 tree110 = TREE_OPERAND (tree11, 0);
5866 tree111 = TREE_OPERAND (tree11, 1);
5867 STRIP_NOPS (tree110);
5868 STRIP_NOPS (tree111);
5869 if (TREE_CODE (tree110) == INTEGER_CST
5870 && 0 == compare_tree_int (tree110,
5871 TYPE_PRECISION
5872 (TREE_TYPE (TREE_OPERAND
5873 (arg0, 0))))
5874 && operand_equal_p (tree01, tree111, 0))
5875 return build ((code0 == LSHIFT_EXPR
5876 ? LROTATE_EXPR
5877 : RROTATE_EXPR),
5878 type, TREE_OPERAND (arg0, 0), tree01);
5879 }
5880 else if (code01 == MINUS_EXPR)
5881 {
5882 tree tree010, tree011;
5883 tree010 = TREE_OPERAND (tree01, 0);
5884 tree011 = TREE_OPERAND (tree01, 1);
5885 STRIP_NOPS (tree010);
5886 STRIP_NOPS (tree011);
5887 if (TREE_CODE (tree010) == INTEGER_CST
5888 && 0 == compare_tree_int (tree010,
5889 TYPE_PRECISION
5890 (TREE_TYPE (TREE_OPERAND
5891 (arg0, 0))))
5892 && operand_equal_p (tree11, tree011, 0))
5893 return build ((code0 != LSHIFT_EXPR
5894 ? LROTATE_EXPR
5895 : RROTATE_EXPR),
5896 type, TREE_OPERAND (arg0, 0), tree11);
5897 }
5898 }
5899 }
5900
5901 associate:
5902 /* In most languages, can't associate operations on floats through
5903 parentheses. Rather than remember where the parentheses were, we
5904 don't associate floats at all, unless the user has specified
5905 -funsafe-math-optimizations. */
5906
5907 if (! wins
5908 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
5909 {
5910 tree var0, con0, lit0, minus_lit0;
5911 tree var1, con1, lit1, minus_lit1;
5912
5913 /* Split both trees into variables, constants, and literals. Then
5914 associate each group together, the constants with literals,
5915 then the result with variables. This increases the chances of
5916 literals being recombined later and of generating relocatable
5917 expressions for the sum of a constant and literal. */
5918 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
5919 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
5920 code == MINUS_EXPR);
5921
5922 /* Only do something if we found more than two objects. Otherwise,
5923 nothing has changed and we risk infinite recursion. */
5924 if (2 < ((var0 != 0) + (var1 != 0)
5925 + (con0 != 0) + (con1 != 0)
5926 + (lit0 != 0) + (lit1 != 0)
5927 + (minus_lit0 != 0) + (minus_lit1 != 0)))
5928 {
5929 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
5930 if (code == MINUS_EXPR)
5931 code = PLUS_EXPR;
5932
5933 var0 = associate_trees (var0, var1, code, type);
5934 con0 = associate_trees (con0, con1, code, type);
5935 lit0 = associate_trees (lit0, lit1, code, type);
5936 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
5937
5938 /* Preserve the MINUS_EXPR if the negative part of the literal is
5939 greater than the positive part. Otherwise, the multiplicative
5940 folding code (i.e extract_muldiv) may be fooled in case
5941 unsigned constants are subtracted, like in the following
5942 example: ((X*2 + 4) - 8U)/2. */
5943 if (minus_lit0 && lit0)
5944 {
5945 if (TREE_CODE (lit0) == INTEGER_CST
5946 && TREE_CODE (minus_lit0) == INTEGER_CST
5947 && tree_int_cst_lt (lit0, minus_lit0))
5948 {
5949 minus_lit0 = associate_trees (minus_lit0, lit0,
5950 MINUS_EXPR, type);
5951 lit0 = 0;
5952 }
5953 else
5954 {
5955 lit0 = associate_trees (lit0, minus_lit0,
5956 MINUS_EXPR, type);
5957 minus_lit0 = 0;
5958 }
5959 }
5960 if (minus_lit0)
5961 {
5962 if (con0 == 0)
5963 return convert (type, associate_trees (var0, minus_lit0,
5964 MINUS_EXPR, type));
5965 else
5966 {
5967 con0 = associate_trees (con0, minus_lit0,
5968 MINUS_EXPR, type);
5969 return convert (type, associate_trees (var0, con0,
5970 PLUS_EXPR, type));
5971 }
5972 }
5973
5974 con0 = associate_trees (con0, lit0, code, type);
5975 return convert (type, associate_trees (var0, con0, code, type));
5976 }
5977 }
5978
5979 binary:
5980 if (wins)
5981 t1 = const_binop (code, arg0, arg1, 0);
5982 if (t1 != NULL_TREE)
5983 {
5984 /* The return value should always have
5985 the same type as the original expression. */
5986 if (TREE_TYPE (t1) != TREE_TYPE (t))
5987 t1 = convert (TREE_TYPE (t), t1);
5988
5989 return t1;
5990 }
5991 return t;
5992
5993 case MINUS_EXPR:
5994 /* A - (-B) -> A + B */
5995 if (TREE_CODE (arg1) == NEGATE_EXPR)
5996 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5997 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
5998 if (TREE_CODE (arg0) == NEGATE_EXPR
5999 && (FLOAT_TYPE_P (type)
6000 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
6001 && negate_expr_p (arg1)
6002 && (! TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
6003 && (! TREE_SIDE_EFFECTS (arg1) || TREE_CONSTANT (arg0)))
6004 return fold (build (MINUS_EXPR, type, negate_expr (arg1),
6005 TREE_OPERAND (arg0, 0)));
6006
6007 if (! FLOAT_TYPE_P (type))
6008 {
6009 if (! wins && integer_zerop (arg0))
6010 return negate_expr (convert (type, arg1));
6011 if (integer_zerop (arg1))
6012 return non_lvalue (convert (type, arg0));
6013
6014 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
6015 about the case where C is a constant, just try one of the
6016 four possibilities. */
6017
6018 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
6019 && operand_equal_p (TREE_OPERAND (arg0, 1),
6020 TREE_OPERAND (arg1, 1), 0))
6021 return fold (build (MULT_EXPR, type,
6022 fold (build (MINUS_EXPR, type,
6023 TREE_OPERAND (arg0, 0),
6024 TREE_OPERAND (arg1, 0))),
6025 TREE_OPERAND (arg0, 1)));
6026
6027 /* Fold A - (A & B) into ~B & A. */
6028 if (!TREE_SIDE_EFFECTS (arg0)
6029 && TREE_CODE (arg1) == BIT_AND_EXPR)
6030 {
6031 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
6032 return fold (build (BIT_AND_EXPR, type,
6033 fold (build1 (BIT_NOT_EXPR, type,
6034 TREE_OPERAND (arg1, 0))),
6035 arg0));
6036 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
6037 return fold (build (BIT_AND_EXPR, type,
6038 fold (build1 (BIT_NOT_EXPR, type,
6039 TREE_OPERAND (arg1, 1))),
6040 arg0));
6041 }
6042
6043 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
6044 any power of 2 minus 1. */
6045 if (TREE_CODE (arg0) == BIT_AND_EXPR
6046 && TREE_CODE (arg1) == BIT_AND_EXPR
6047 && operand_equal_p (TREE_OPERAND (arg0, 0),
6048 TREE_OPERAND (arg1, 0), 0))
6049 {
6050 tree mask0 = TREE_OPERAND (arg0, 1);
6051 tree mask1 = TREE_OPERAND (arg1, 1);
6052 tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
6053
6054 if (operand_equal_p (tem, mask1, 0))
6055 {
6056 tem = fold (build (BIT_XOR_EXPR, type,
6057 TREE_OPERAND (arg0, 0), mask1));
6058 return fold (build (MINUS_EXPR, type, tem, mask1));
6059 }
6060 }
6061 }
6062
6063 /* See if ARG1 is zero and X - ARG1 reduces to X. */
6064 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
6065 return non_lvalue (convert (type, arg0));
6066
6067 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
6068 ARG0 is zero and X + ARG0 reduces to X, since that would mean
6069 (-ARG1 + ARG0) reduces to -ARG1. */
6070 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6071 return negate_expr (convert (type, arg1));
6072
6073 /* Fold &x - &x. This can happen from &x.foo - &x.
6074 This is unsafe for certain floats even in non-IEEE formats.
6075 In IEEE, it is unsafe because it does wrong for NaNs.
6076 Also note that operand_equal_p is always false if an operand
6077 is volatile. */
6078
6079 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
6080 && operand_equal_p (arg0, arg1, 0))
6081 return convert (type, integer_zero_node);
6082
6083 goto associate;
6084
6085 case MULT_EXPR:
6086 /* (-A) * (-B) -> A * B */
6087 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6088 return fold (build (MULT_EXPR, type,
6089 TREE_OPERAND (arg0, 0),
6090 negate_expr (arg1)));
6091 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6092 return fold (build (MULT_EXPR, type,
6093 negate_expr (arg0),
6094 TREE_OPERAND (arg1, 0)));
6095
6096 if (! FLOAT_TYPE_P (type))
6097 {
6098 if (integer_zerop (arg1))
6099 return omit_one_operand (type, arg1, arg0);
6100 if (integer_onep (arg1))
6101 return non_lvalue (convert (type, arg0));
6102
6103 /* (a * (1 << b)) is (a << b) */
6104 if (TREE_CODE (arg1) == LSHIFT_EXPR
6105 && integer_onep (TREE_OPERAND (arg1, 0)))
6106 return fold (build (LSHIFT_EXPR, type, arg0,
6107 TREE_OPERAND (arg1, 1)));
6108 if (TREE_CODE (arg0) == LSHIFT_EXPR
6109 && integer_onep (TREE_OPERAND (arg0, 0)))
6110 return fold (build (LSHIFT_EXPR, type, arg1,
6111 TREE_OPERAND (arg0, 1)));
6112
6113 if (TREE_CODE (arg1) == INTEGER_CST
6114 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
6115 convert (type, arg1),
6116 code, NULL_TREE)))
6117 return convert (type, tem);
6118
6119 }
6120 else
6121 {
6122 /* Maybe fold x * 0 to 0. The expressions aren't the same
6123 when x is NaN, since x * 0 is also NaN. Nor are they the
6124 same in modes with signed zeros, since multiplying a
6125 negative value by 0 gives -0, not +0. */
6126 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
6127 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
6128 && real_zerop (arg1))
6129 return omit_one_operand (type, arg1, arg0);
6130 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
6131 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6132 && real_onep (arg1))
6133 return non_lvalue (convert (type, arg0));
6134
6135 /* Transform x * -1.0 into -x. */
6136 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6137 && real_minus_onep (arg1))
6138 return fold (build1 (NEGATE_EXPR, type, arg0));
6139
6140 /* Convert (C1/X)*C2 into (C1*C2)/X. */
6141 if (flag_unsafe_math_optimizations
6142 && TREE_CODE (arg0) == RDIV_EXPR
6143 && TREE_CODE (arg1) == REAL_CST
6144 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
6145 {
6146 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
6147 arg1, 0);
6148 if (tem)
6149 return fold (build (RDIV_EXPR, type, tem,
6150 TREE_OPERAND (arg0, 1)));
6151 }
6152
6153 if (flag_unsafe_math_optimizations)
6154 {
6155 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6156 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6157
6158 /* Optimizations of sqrt(...)*sqrt(...). */
6159 if ((fcode0 == BUILT_IN_SQRT && fcode1 == BUILT_IN_SQRT)
6160 || (fcode0 == BUILT_IN_SQRTF && fcode1 == BUILT_IN_SQRTF)
6161 || (fcode0 == BUILT_IN_SQRTL && fcode1 == BUILT_IN_SQRTL))
6162 {
6163 tree sqrtfn, arg, arglist;
6164 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6165 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6166
6167 /* Optimize sqrt(x)*sqrt(x) as x. */
6168 if (operand_equal_p (arg00, arg10, 0)
6169 && ! HONOR_SNANS (TYPE_MODE (type)))
6170 return arg00;
6171
6172 /* Optimize sqrt(x)*sqrt(y) as sqrt(x*y). */
6173 sqrtfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6174 arg = fold (build (MULT_EXPR, type, arg00, arg10));
6175 arglist = build_tree_list (NULL_TREE, arg);
6176 return build_function_call_expr (sqrtfn, arglist);
6177 }
6178
6179 /* Optimize expN(x)*expN(y) as expN(x+y). */
6180 if (fcode0 == fcode1
6181 && (fcode0 == BUILT_IN_EXP
6182 || fcode0 == BUILT_IN_EXPF
6183 || fcode0 == BUILT_IN_EXPL
6184 || fcode0 == BUILT_IN_EXP2
6185 || fcode0 == BUILT_IN_EXP2F
6186 || fcode0 == BUILT_IN_EXP2L
6187 || fcode0 == BUILT_IN_EXP10
6188 || fcode0 == BUILT_IN_EXP10F
6189 || fcode0 == BUILT_IN_EXP10L
6190 || fcode0 == BUILT_IN_POW10
6191 || fcode0 == BUILT_IN_POW10F
6192 || fcode0 == BUILT_IN_POW10L))
6193 {
6194 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6195 tree arg = build (PLUS_EXPR, type,
6196 TREE_VALUE (TREE_OPERAND (arg0, 1)),
6197 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6198 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6199 return build_function_call_expr (expfn, arglist);
6200 }
6201
6202 /* Optimizations of pow(...)*pow(...). */
6203 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
6204 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
6205 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
6206 {
6207 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6208 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6209 1)));
6210 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6211 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6212 1)));
6213
6214 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
6215 if (operand_equal_p (arg01, arg11, 0))
6216 {
6217 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6218 tree arg = build (MULT_EXPR, type, arg00, arg10);
6219 tree arglist = tree_cons (NULL_TREE, fold (arg),
6220 build_tree_list (NULL_TREE,
6221 arg01));
6222 return build_function_call_expr (powfn, arglist);
6223 }
6224
6225 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
6226 if (operand_equal_p (arg00, arg10, 0))
6227 {
6228 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6229 tree arg = fold (build (PLUS_EXPR, type, arg01, arg11));
6230 tree arglist = tree_cons (NULL_TREE, arg00,
6231 build_tree_list (NULL_TREE,
6232 arg));
6233 return build_function_call_expr (powfn, arglist);
6234 }
6235 }
6236
6237 /* Optimize tan(x)*cos(x) as sin(x). */
6238 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
6239 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
6240 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
6241 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
6242 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
6243 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
6244 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6245 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6246 {
6247 tree sinfn;
6248
6249 switch (fcode0)
6250 {
6251 case BUILT_IN_TAN:
6252 case BUILT_IN_COS:
6253 sinfn = implicit_built_in_decls[BUILT_IN_SIN];
6254 break;
6255 case BUILT_IN_TANF:
6256 case BUILT_IN_COSF:
6257 sinfn = implicit_built_in_decls[BUILT_IN_SINF];
6258 break;
6259 case BUILT_IN_TANL:
6260 case BUILT_IN_COSL:
6261 sinfn = implicit_built_in_decls[BUILT_IN_SINL];
6262 break;
6263 default:
6264 sinfn = NULL_TREE;
6265 }
6266
6267 if (sinfn != NULL_TREE)
6268 return build_function_call_expr (sinfn,
6269 TREE_OPERAND (arg0, 1));
6270 }
6271
6272 /* Optimize x*pow(x,c) as pow(x,c+1). */
6273 if (fcode1 == BUILT_IN_POW
6274 || fcode1 == BUILT_IN_POWF
6275 || fcode1 == BUILT_IN_POWL)
6276 {
6277 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6278 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6279 1)));
6280 if (TREE_CODE (arg11) == REAL_CST
6281 && ! TREE_CONSTANT_OVERFLOW (arg11)
6282 && operand_equal_p (arg0, arg10, 0))
6283 {
6284 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6285 REAL_VALUE_TYPE c;
6286 tree arg, arglist;
6287
6288 c = TREE_REAL_CST (arg11);
6289 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6290 arg = build_real (type, c);
6291 arglist = build_tree_list (NULL_TREE, arg);
6292 arglist = tree_cons (NULL_TREE, arg0, arglist);
6293 return build_function_call_expr (powfn, arglist);
6294 }
6295 }
6296
6297 /* Optimize pow(x,c)*x as pow(x,c+1). */
6298 if (fcode0 == BUILT_IN_POW
6299 || fcode0 == BUILT_IN_POWF
6300 || fcode0 == BUILT_IN_POWL)
6301 {
6302 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6303 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6304 1)));
6305 if (TREE_CODE (arg01) == REAL_CST
6306 && ! TREE_CONSTANT_OVERFLOW (arg01)
6307 && operand_equal_p (arg1, arg00, 0))
6308 {
6309 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6310 REAL_VALUE_TYPE c;
6311 tree arg, arglist;
6312
6313 c = TREE_REAL_CST (arg01);
6314 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6315 arg = build_real (type, c);
6316 arglist = build_tree_list (NULL_TREE, arg);
6317 arglist = tree_cons (NULL_TREE, arg1, arglist);
6318 return build_function_call_expr (powfn, arglist);
6319 }
6320 }
6321
6322 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
6323 if (! optimize_size
6324 && operand_equal_p (arg0, arg1, 0))
6325 {
6326 tree powfn;
6327
6328 if (type == double_type_node)
6329 powfn = implicit_built_in_decls[BUILT_IN_POW];
6330 else if (type == float_type_node)
6331 powfn = implicit_built_in_decls[BUILT_IN_POWF];
6332 else if (type == long_double_type_node)
6333 powfn = implicit_built_in_decls[BUILT_IN_POWL];
6334 else
6335 powfn = NULL_TREE;
6336
6337 if (powfn)
6338 {
6339 tree arg = build_real (type, dconst2);
6340 tree arglist = build_tree_list (NULL_TREE, arg);
6341 arglist = tree_cons (NULL_TREE, arg0, arglist);
6342 return build_function_call_expr (powfn, arglist);
6343 }
6344 }
6345 }
6346 }
6347 goto associate;
6348
6349 case BIT_IOR_EXPR:
6350 bit_ior:
6351 if (integer_all_onesp (arg1))
6352 return omit_one_operand (type, arg1, arg0);
6353 if (integer_zerop (arg1))
6354 return non_lvalue (convert (type, arg0));
6355 t1 = distribute_bit_expr (code, type, arg0, arg1);
6356 if (t1 != NULL_TREE)
6357 return t1;
6358
6359 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
6360
6361 This results in more efficient code for machines without a NAND
6362 instruction. Combine will canonicalize to the first form
6363 which will allow use of NAND instructions provided by the
6364 backend if they exist. */
6365 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6366 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6367 {
6368 return fold (build1 (BIT_NOT_EXPR, type,
6369 build (BIT_AND_EXPR, type,
6370 TREE_OPERAND (arg0, 0),
6371 TREE_OPERAND (arg1, 0))));
6372 }
6373
6374 /* See if this can be simplified into a rotate first. If that
6375 is unsuccessful continue in the association code. */
6376 goto bit_rotate;
6377
6378 case BIT_XOR_EXPR:
6379 if (integer_zerop (arg1))
6380 return non_lvalue (convert (type, arg0));
6381 if (integer_all_onesp (arg1))
6382 return fold (build1 (BIT_NOT_EXPR, type, arg0));
6383
6384 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
6385 with a constant, and the two constants have no bits in common,
6386 we should treat this as a BIT_IOR_EXPR since this may produce more
6387 simplifications. */
6388 if (TREE_CODE (arg0) == BIT_AND_EXPR
6389 && TREE_CODE (arg1) == BIT_AND_EXPR
6390 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6391 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6392 && integer_zerop (const_binop (BIT_AND_EXPR,
6393 TREE_OPERAND (arg0, 1),
6394 TREE_OPERAND (arg1, 1), 0)))
6395 {
6396 code = BIT_IOR_EXPR;
6397 goto bit_ior;
6398 }
6399
6400 /* See if this can be simplified into a rotate first. If that
6401 is unsuccessful continue in the association code. */
6402 goto bit_rotate;
6403
6404 case BIT_AND_EXPR:
6405 if (integer_all_onesp (arg1))
6406 return non_lvalue (convert (type, arg0));
6407 if (integer_zerop (arg1))
6408 return omit_one_operand (type, arg1, arg0);
6409 t1 = distribute_bit_expr (code, type, arg0, arg1);
6410 if (t1 != NULL_TREE)
6411 return t1;
6412 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
6413 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
6414 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6415 {
6416 unsigned int prec
6417 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
6418
6419 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
6420 && (~TREE_INT_CST_LOW (arg1)
6421 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
6422 return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
6423 }
6424
6425 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
6426
6427 This results in more efficient code for machines without a NOR
6428 instruction. Combine will canonicalize to the first form
6429 which will allow use of NOR instructions provided by the
6430 backend if they exist. */
6431 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6432 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6433 {
6434 return fold (build1 (BIT_NOT_EXPR, type,
6435 build (BIT_IOR_EXPR, type,
6436 TREE_OPERAND (arg0, 0),
6437 TREE_OPERAND (arg1, 0))));
6438 }
6439
6440 goto associate;
6441
6442 case RDIV_EXPR:
6443 /* Don't touch a floating-point divide by zero unless the mode
6444 of the constant can represent infinity. */
6445 if (TREE_CODE (arg1) == REAL_CST
6446 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
6447 && real_zerop (arg1))
6448 return t;
6449
6450 /* (-A) / (-B) -> A / B */
6451 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6452 return fold (build (RDIV_EXPR, type,
6453 TREE_OPERAND (arg0, 0),
6454 negate_expr (arg1)));
6455 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6456 return fold (build (RDIV_EXPR, type,
6457 negate_expr (arg0),
6458 TREE_OPERAND (arg1, 0)));
6459
6460 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
6461 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6462 && real_onep (arg1))
6463 return non_lvalue (convert (type, arg0));
6464
6465 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
6466 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6467 && real_minus_onep (arg1))
6468 return non_lvalue (convert (type, negate_expr (arg0)));
6469
6470 /* If ARG1 is a constant, we can convert this to a multiply by the
6471 reciprocal. This does not have the same rounding properties,
6472 so only do this if -funsafe-math-optimizations. We can actually
6473 always safely do it if ARG1 is a power of two, but it's hard to
6474 tell if it is or not in a portable manner. */
6475 if (TREE_CODE (arg1) == REAL_CST)
6476 {
6477 if (flag_unsafe_math_optimizations
6478 && 0 != (tem = const_binop (code, build_real (type, dconst1),
6479 arg1, 0)))
6480 return fold (build (MULT_EXPR, type, arg0, tem));
6481 /* Find the reciprocal if optimizing and the result is exact. */
6482 if (optimize)
6483 {
6484 REAL_VALUE_TYPE r;
6485 r = TREE_REAL_CST (arg1);
6486 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
6487 {
6488 tem = build_real (type, r);
6489 return fold (build (MULT_EXPR, type, arg0, tem));
6490 }
6491 }
6492 }
6493 /* Convert A/B/C to A/(B*C). */
6494 if (flag_unsafe_math_optimizations
6495 && TREE_CODE (arg0) == RDIV_EXPR)
6496 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
6497 fold (build (MULT_EXPR, type,
6498 TREE_OPERAND (arg0, 1), arg1))));
6499
6500 /* Convert A/(B/C) to (A/B)*C. */
6501 if (flag_unsafe_math_optimizations
6502 && TREE_CODE (arg1) == RDIV_EXPR)
6503 return fold (build (MULT_EXPR, type,
6504 fold (build (RDIV_EXPR, type, arg0,
6505 TREE_OPERAND (arg1, 0))),
6506 TREE_OPERAND (arg1, 1)));
6507
6508 /* Convert C1/(X*C2) into (C1/C2)/X. */
6509 if (flag_unsafe_math_optimizations
6510 && TREE_CODE (arg1) == MULT_EXPR
6511 && TREE_CODE (arg0) == REAL_CST
6512 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
6513 {
6514 tree tem = const_binop (RDIV_EXPR, arg0,
6515 TREE_OPERAND (arg1, 1), 0);
6516 if (tem)
6517 return fold (build (RDIV_EXPR, type, tem,
6518 TREE_OPERAND (arg1, 0)));
6519 }
6520
6521 if (flag_unsafe_math_optimizations)
6522 {
6523 enum built_in_function fcode = builtin_mathfn_code (arg1);
6524 /* Optimize x/expN(y) into x*expN(-y). */
6525 if (fcode == BUILT_IN_EXP
6526 || fcode == BUILT_IN_EXPF
6527 || fcode == BUILT_IN_EXPL
6528 || fcode == BUILT_IN_EXP2
6529 || fcode == BUILT_IN_EXP2F
6530 || fcode == BUILT_IN_EXP2L
6531 || fcode == BUILT_IN_EXP10
6532 || fcode == BUILT_IN_EXP10F
6533 || fcode == BUILT_IN_EXP10L
6534 || fcode == BUILT_IN_POW10
6535 || fcode == BUILT_IN_POW10F
6536 || fcode == BUILT_IN_POW10L)
6537 {
6538 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6539 tree arg = build1 (NEGATE_EXPR, type,
6540 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6541 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6542 arg1 = build_function_call_expr (expfn, arglist);
6543 return fold (build (MULT_EXPR, type, arg0, arg1));
6544 }
6545
6546 /* Optimize x/pow(y,z) into x*pow(y,-z). */
6547 if (fcode == BUILT_IN_POW
6548 || fcode == BUILT_IN_POWF
6549 || fcode == BUILT_IN_POWL)
6550 {
6551 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6552 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6553 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
6554 tree neg11 = fold (build1 (NEGATE_EXPR, type, arg11));
6555 tree arglist = tree_cons(NULL_TREE, arg10,
6556 build_tree_list (NULL_TREE, neg11));
6557 arg1 = build_function_call_expr (powfn, arglist);
6558 return fold (build (MULT_EXPR, type, arg0, arg1));
6559 }
6560 }
6561
6562 if (flag_unsafe_math_optimizations)
6563 {
6564 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6565 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6566
6567 /* Optimize sin(x)/cos(x) as tan(x). */
6568 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
6569 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
6570 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
6571 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6572 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6573 {
6574 tree tanfn;
6575
6576 if (fcode0 == BUILT_IN_SIN)
6577 tanfn = implicit_built_in_decls[BUILT_IN_TAN];
6578 else if (fcode0 == BUILT_IN_SINF)
6579 tanfn = implicit_built_in_decls[BUILT_IN_TANF];
6580 else if (fcode0 == BUILT_IN_SINL)
6581 tanfn = implicit_built_in_decls[BUILT_IN_TANL];
6582 else
6583 tanfn = NULL_TREE;
6584
6585 if (tanfn != NULL_TREE)
6586 return build_function_call_expr (tanfn,
6587 TREE_OPERAND (arg0, 1));
6588 }
6589
6590 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
6591 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
6592 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
6593 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
6594 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6595 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6596 {
6597 tree tanfn;
6598
6599 if (fcode0 == BUILT_IN_COS)
6600 tanfn = implicit_built_in_decls[BUILT_IN_TAN];
6601 else if (fcode0 == BUILT_IN_COSF)
6602 tanfn = implicit_built_in_decls[BUILT_IN_TANF];
6603 else if (fcode0 == BUILT_IN_COSL)
6604 tanfn = implicit_built_in_decls[BUILT_IN_TANL];
6605 else
6606 tanfn = NULL_TREE;
6607
6608 if (tanfn != NULL_TREE)
6609 {
6610 tree tmp = TREE_OPERAND (arg0, 1);
6611 tmp = build_function_call_expr (tanfn, tmp);
6612 return fold (build (RDIV_EXPR, type,
6613 build_real (type, dconst1),
6614 tmp));
6615 }
6616 }
6617
6618 /* Optimize pow(x,c)/x as pow(x,c-1). */
6619 if (fcode0 == BUILT_IN_POW
6620 || fcode0 == BUILT_IN_POWF
6621 || fcode0 == BUILT_IN_POWL)
6622 {
6623 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6624 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
6625 if (TREE_CODE (arg01) == REAL_CST
6626 && ! TREE_CONSTANT_OVERFLOW (arg01)
6627 && operand_equal_p (arg1, arg00, 0))
6628 {
6629 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6630 REAL_VALUE_TYPE c;
6631 tree arg, arglist;
6632
6633 c = TREE_REAL_CST (arg01);
6634 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
6635 arg = build_real (type, c);
6636 arglist = build_tree_list (NULL_TREE, arg);
6637 arglist = tree_cons (NULL_TREE, arg1, arglist);
6638 return build_function_call_expr (powfn, arglist);
6639 }
6640 }
6641 }
6642 goto binary;
6643
6644 case TRUNC_DIV_EXPR:
6645 case ROUND_DIV_EXPR:
6646 case FLOOR_DIV_EXPR:
6647 case CEIL_DIV_EXPR:
6648 case EXACT_DIV_EXPR:
6649 if (integer_onep (arg1))
6650 return non_lvalue (convert (type, arg0));
6651 if (integer_zerop (arg1))
6652 return t;
6653
6654 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
6655 operation, EXACT_DIV_EXPR.
6656
6657 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
6658 At one time others generated faster code, it's not clear if they do
6659 after the last round to changes to the DIV code in expmed.c. */
6660 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
6661 && multiple_of_p (type, arg0, arg1))
6662 return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));
6663
6664 if (TREE_CODE (arg1) == INTEGER_CST
6665 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6666 code, NULL_TREE)))
6667 return convert (type, tem);
6668
6669 goto binary;
6670
6671 case CEIL_MOD_EXPR:
6672 case FLOOR_MOD_EXPR:
6673 case ROUND_MOD_EXPR:
6674 case TRUNC_MOD_EXPR:
6675 if (integer_onep (arg1))
6676 return omit_one_operand (type, integer_zero_node, arg0);
6677 if (integer_zerop (arg1))
6678 return t;
6679
6680 if (TREE_CODE (arg1) == INTEGER_CST
6681 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6682 code, NULL_TREE)))
6683 return convert (type, tem);
6684
6685 goto binary;
6686
6687 case LROTATE_EXPR:
6688 case RROTATE_EXPR:
6689 if (integer_all_onesp (arg0))
6690 return omit_one_operand (type, arg0, arg1);
6691 goto shift;
6692
6693 case RSHIFT_EXPR:
6694 /* Optimize -1 >> x for arithmetic right shifts. */
6695 if (integer_all_onesp (arg0) && ! TREE_UNSIGNED (type))
6696 return omit_one_operand (type, arg0, arg1);
6697 /* ... fall through ... */
6698
6699 case LSHIFT_EXPR:
6700 shift:
6701 if (integer_zerop (arg1))
6702 return non_lvalue (convert (type, arg0));
6703 if (integer_zerop (arg0))
6704 return omit_one_operand (type, arg0, arg1);
6705
6706 /* Since negative shift count is not well-defined,
6707 don't try to compute it in the compiler. */
6708 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
6709 return t;
6710 /* Rewrite an LROTATE_EXPR by a constant into an
6711 RROTATE_EXPR by a new constant. */
6712 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
6713 {
6714 tree tem = build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0);
6715 tem = convert (TREE_TYPE (arg1), tem);
6716 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
6717 return fold (build (RROTATE_EXPR, type, arg0, tem));
6718 }
6719
6720 /* If we have a rotate of a bit operation with the rotate count and
6721 the second operand of the bit operation both constant,
6722 permute the two operations. */
6723 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6724 && (TREE_CODE (arg0) == BIT_AND_EXPR
6725 || TREE_CODE (arg0) == BIT_IOR_EXPR
6726 || TREE_CODE (arg0) == BIT_XOR_EXPR)
6727 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
6728 return fold (build (TREE_CODE (arg0), type,
6729 fold (build (code, type,
6730 TREE_OPERAND (arg0, 0), arg1)),
6731 fold (build (code, type,
6732 TREE_OPERAND (arg0, 1), arg1))));
6733
6734 /* Two consecutive rotates adding up to the width of the mode can
6735 be ignored. */
6736 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6737 && TREE_CODE (arg0) == RROTATE_EXPR
6738 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6739 && TREE_INT_CST_HIGH (arg1) == 0
6740 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
6741 && ((TREE_INT_CST_LOW (arg1)
6742 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
6743 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
6744 return TREE_OPERAND (arg0, 0);
6745
6746 goto binary;
6747
6748 case MIN_EXPR:
6749 if (operand_equal_p (arg0, arg1, 0))
6750 return omit_one_operand (type, arg0, arg1);
6751 if (INTEGRAL_TYPE_P (type)
6752 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
6753 return omit_one_operand (type, arg1, arg0);
6754 goto associate;
6755
6756 case MAX_EXPR:
6757 if (operand_equal_p (arg0, arg1, 0))
6758 return omit_one_operand (type, arg0, arg1);
6759 if (INTEGRAL_TYPE_P (type)
6760 && TYPE_MAX_VALUE (type)
6761 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
6762 return omit_one_operand (type, arg1, arg0);
6763 goto associate;
6764
6765 case TRUTH_NOT_EXPR:
6766 /* Note that the operand of this must be an int
6767 and its values must be 0 or 1.
6768 ("true" is a fixed value perhaps depending on the language,
6769 but we don't handle values other than 1 correctly yet.) */
6770 tem = invert_truthvalue (arg0);
6771 /* Avoid infinite recursion. */
6772 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
6773 {
6774 tem = fold_single_bit_test (code, arg0, arg1, type);
6775 if (tem)
6776 return tem;
6777 return t;
6778 }
6779 return convert (type, tem);
6780
6781 case TRUTH_ANDIF_EXPR:
6782 /* Note that the operands of this must be ints
6783 and their values must be 0 or 1.
6784 ("true" is a fixed value perhaps depending on the language.) */
6785 /* If first arg is constant zero, return it. */
6786 if (integer_zerop (arg0))
6787 return convert (type, arg0);
6788 case TRUTH_AND_EXPR:
6789 /* If either arg is constant true, drop it. */
6790 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6791 return non_lvalue (convert (type, arg1));
6792 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
6793 /* Preserve sequence points. */
6794 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
6795 return non_lvalue (convert (type, arg0));
6796 /* If second arg is constant zero, result is zero, but first arg
6797 must be evaluated. */
6798 if (integer_zerop (arg1))
6799 return omit_one_operand (type, arg1, arg0);
6800 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
6801 case will be handled here. */
6802 if (integer_zerop (arg0))
6803 return omit_one_operand (type, arg0, arg1);
6804
6805 truth_andor:
6806 /* We only do these simplifications if we are optimizing. */
6807 if (!optimize)
6808 return t;
6809
6810 /* Check for things like (A || B) && (A || C). We can convert this
6811 to A || (B && C). Note that either operator can be any of the four
6812 truth and/or operations and the transformation will still be
6813 valid. Also note that we only care about order for the
6814 ANDIF and ORIF operators. If B contains side effects, this
6815 might change the truth-value of A. */
6816 if (TREE_CODE (arg0) == TREE_CODE (arg1)
6817 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
6818 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
6819 || TREE_CODE (arg0) == TRUTH_AND_EXPR
6820 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
6821 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
6822 {
6823 tree a00 = TREE_OPERAND (arg0, 0);
6824 tree a01 = TREE_OPERAND (arg0, 1);
6825 tree a10 = TREE_OPERAND (arg1, 0);
6826 tree a11 = TREE_OPERAND (arg1, 1);
6827 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
6828 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
6829 && (code == TRUTH_AND_EXPR
6830 || code == TRUTH_OR_EXPR));
6831
6832 if (operand_equal_p (a00, a10, 0))
6833 return fold (build (TREE_CODE (arg0), type, a00,
6834 fold (build (code, type, a01, a11))));
6835 else if (commutative && operand_equal_p (a00, a11, 0))
6836 return fold (build (TREE_CODE (arg0), type, a00,
6837 fold (build (code, type, a01, a10))));
6838 else if (commutative && operand_equal_p (a01, a10, 0))
6839 return fold (build (TREE_CODE (arg0), type, a01,
6840 fold (build (code, type, a00, a11))));
6841
6842 /* This case if tricky because we must either have commutative
6843 operators or else A10 must not have side-effects. */
6844
6845 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
6846 && operand_equal_p (a01, a11, 0))
6847 return fold (build (TREE_CODE (arg0), type,
6848 fold (build (code, type, a00, a10)),
6849 a01));
6850 }
6851
6852 /* See if we can build a range comparison. */
6853 if (0 != (tem = fold_range_test (t)))
6854 return tem;
6855
6856 /* Check for the possibility of merging component references. If our
6857 lhs is another similar operation, try to merge its rhs with our
6858 rhs. Then try to merge our lhs and rhs. */
6859 if (TREE_CODE (arg0) == code
6860 && 0 != (tem = fold_truthop (code, type,
6861 TREE_OPERAND (arg0, 1), arg1)))
6862 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6863
6864 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
6865 return tem;
6866
6867 return t;
6868
6869 case TRUTH_ORIF_EXPR:
6870 /* Note that the operands of this must be ints
6871 and their values must be 0 or true.
6872 ("true" is a fixed value perhaps depending on the language.) */
6873 /* If first arg is constant true, return it. */
6874 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6875 return convert (type, arg0);
6876 case TRUTH_OR_EXPR:
6877 /* If either arg is constant zero, drop it. */
6878 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
6879 return non_lvalue (convert (type, arg1));
6880 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
6881 /* Preserve sequence points. */
6882 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
6883 return non_lvalue (convert (type, arg0));
6884 /* If second arg is constant true, result is true, but we must
6885 evaluate first arg. */
6886 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
6887 return omit_one_operand (type, arg1, arg0);
6888 /* Likewise for first arg, but note this only occurs here for
6889 TRUTH_OR_EXPR. */
6890 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6891 return omit_one_operand (type, arg0, arg1);
6892 goto truth_andor;
6893
6894 case TRUTH_XOR_EXPR:
6895 /* If either arg is constant zero, drop it. */
6896 if (integer_zerop (arg0))
6897 return non_lvalue (convert (type, arg1));
6898 if (integer_zerop (arg1))
6899 return non_lvalue (convert (type, arg0));
6900 /* If either arg is constant true, this is a logical inversion. */
6901 if (integer_onep (arg0))
6902 return non_lvalue (convert (type, invert_truthvalue (arg1)));
6903 if (integer_onep (arg1))
6904 return non_lvalue (convert (type, invert_truthvalue (arg0)));
6905 return t;
6906
6907 case EQ_EXPR:
6908 case NE_EXPR:
6909 case LT_EXPR:
6910 case GT_EXPR:
6911 case LE_EXPR:
6912 case GE_EXPR:
6913 /* If one arg is a real or integer constant, put it last. */
6914 if (tree_swap_operands_p (arg0, arg1))
6915 return fold (build (swap_tree_comparison (code), type, arg1, arg0));
6916
6917 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
6918 {
6919 tree targ0 = strip_float_extensions (arg0);
6920 tree targ1 = strip_float_extensions (arg1);
6921 tree newtype = TREE_TYPE (targ0);
6922
6923 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
6924 newtype = TREE_TYPE (targ1);
6925
6926 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
6927 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
6928 return fold (build (code, type, convert (newtype, targ0),
6929 convert (newtype, targ1)));
6930
6931 /* (-a) CMP (-b) -> b CMP a */
6932 if (TREE_CODE (arg0) == NEGATE_EXPR
6933 && TREE_CODE (arg1) == NEGATE_EXPR)
6934 return fold (build (code, type, TREE_OPERAND (arg1, 0),
6935 TREE_OPERAND (arg0, 0)));
6936
6937 if (TREE_CODE (arg1) == REAL_CST)
6938 {
6939 REAL_VALUE_TYPE cst;
6940 cst = TREE_REAL_CST (arg1);
6941
6942 /* (-a) CMP CST -> a swap(CMP) (-CST) */
6943 if (TREE_CODE (arg0) == NEGATE_EXPR)
6944 return
6945 fold (build (swap_tree_comparison (code), type,
6946 TREE_OPERAND (arg0, 0),
6947 build_real (TREE_TYPE (arg1),
6948 REAL_VALUE_NEGATE (cst))));
6949
6950 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
6951 /* a CMP (-0) -> a CMP 0 */
6952 if (REAL_VALUE_MINUS_ZERO (cst))
6953 return fold (build (code, type, arg0,
6954 build_real (TREE_TYPE (arg1), dconst0)));
6955
6956 /* x != NaN is always true, other ops are always false. */
6957 if (REAL_VALUE_ISNAN (cst)
6958 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
6959 {
6960 t = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
6961 return omit_one_operand (type, convert (type, t), arg0);
6962 }
6963
6964 /* Fold comparisons against infinity. */
6965 if (REAL_VALUE_ISINF (cst))
6966 {
6967 tem = fold_inf_compare (code, type, arg0, arg1);
6968 if (tem != NULL_TREE)
6969 return tem;
6970 }
6971 }
6972
6973 /* If this is a comparison of a real constant with a PLUS_EXPR
6974 or a MINUS_EXPR of a real constant, we can convert it into a
6975 comparison with a revised real constant as long as no overflow
6976 occurs when unsafe_math_optimizations are enabled. */
6977 if (flag_unsafe_math_optimizations
6978 && TREE_CODE (arg1) == REAL_CST
6979 && (TREE_CODE (arg0) == PLUS_EXPR
6980 || TREE_CODE (arg0) == MINUS_EXPR)
6981 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6982 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
6983 ? MINUS_EXPR : PLUS_EXPR,
6984 arg1, TREE_OPERAND (arg0, 1), 0))
6985 && ! TREE_CONSTANT_OVERFLOW (tem))
6986 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6987
6988 /* Likewise, we can simplify a comparison of a real constant with
6989 a MINUS_EXPR whose first operand is also a real constant, i.e.
6990 (c1 - x) < c2 becomes x > c1-c2. */
6991 if (flag_unsafe_math_optimizations
6992 && TREE_CODE (arg1) == REAL_CST
6993 && TREE_CODE (arg0) == MINUS_EXPR
6994 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
6995 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
6996 arg1, 0))
6997 && ! TREE_CONSTANT_OVERFLOW (tem))
6998 return fold (build (swap_tree_comparison (code), type,
6999 TREE_OPERAND (arg0, 1), tem));
7000
7001 /* Fold comparisons against built-in math functions. */
7002 if (TREE_CODE (arg1) == REAL_CST
7003 && flag_unsafe_math_optimizations
7004 && ! flag_errno_math)
7005 {
7006 enum built_in_function fcode = builtin_mathfn_code (arg0);
7007
7008 if (fcode != END_BUILTINS)
7009 {
7010 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
7011 if (tem != NULL_TREE)
7012 return tem;
7013 }
7014 }
7015 }
7016
7017 /* Convert foo++ == CONST into ++foo == CONST + INCR.
7018 First, see if one arg is constant; find the constant arg
7019 and the other one. */
7020 {
7021 tree constop = 0, varop = NULL_TREE;
7022 int constopnum = -1;
7023
7024 if (TREE_CONSTANT (arg1))
7025 constopnum = 1, constop = arg1, varop = arg0;
7026 if (TREE_CONSTANT (arg0))
7027 constopnum = 0, constop = arg0, varop = arg1;
7028
7029 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
7030 {
7031 /* This optimization is invalid for ordered comparisons
7032 if CONST+INCR overflows or if foo+incr might overflow.
7033 This optimization is invalid for floating point due to rounding.
7034 For pointer types we assume overflow doesn't happen. */
7035 if (POINTER_TYPE_P (TREE_TYPE (varop))
7036 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
7037 && (code == EQ_EXPR || code == NE_EXPR)))
7038 {
7039 tree newconst
7040 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
7041 constop, TREE_OPERAND (varop, 1)));
7042
7043 /* Do not overwrite the current varop to be a preincrement,
7044 create a new node so that we won't confuse our caller who
7045 might create trees and throw them away, reusing the
7046 arguments that they passed to build. This shows up in
7047 the THEN or ELSE parts of ?: being postincrements. */
7048 varop = build (PREINCREMENT_EXPR, TREE_TYPE (varop),
7049 TREE_OPERAND (varop, 0),
7050 TREE_OPERAND (varop, 1));
7051
7052 /* If VAROP is a reference to a bitfield, we must mask
7053 the constant by the width of the field. */
7054 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7055 && DECL_BIT_FIELD(TREE_OPERAND
7056 (TREE_OPERAND (varop, 0), 1)))
7057 {
7058 int size
7059 = TREE_INT_CST_LOW (DECL_SIZE
7060 (TREE_OPERAND
7061 (TREE_OPERAND (varop, 0), 1)));
7062 tree mask, unsigned_type;
7063 unsigned int precision;
7064 tree folded_compare;
7065
7066 /* First check whether the comparison would come out
7067 always the same. If we don't do that we would
7068 change the meaning with the masking. */
7069 if (constopnum == 0)
7070 folded_compare = fold (build (code, type, constop,
7071 TREE_OPERAND (varop, 0)));
7072 else
7073 folded_compare = fold (build (code, type,
7074 TREE_OPERAND (varop, 0),
7075 constop));
7076 if (integer_zerop (folded_compare)
7077 || integer_onep (folded_compare))
7078 return omit_one_operand (type, folded_compare, varop);
7079
7080 unsigned_type = (*lang_hooks.types.type_for_size)(size, 1);
7081 precision = TYPE_PRECISION (unsigned_type);
7082 mask = build_int_2 (~0, ~0);
7083 TREE_TYPE (mask) = unsigned_type;
7084 force_fit_type (mask, 0);
7085 mask = const_binop (RSHIFT_EXPR, mask,
7086 size_int (precision - size), 0);
7087 newconst = fold (build (BIT_AND_EXPR,
7088 TREE_TYPE (varop), newconst,
7089 convert (TREE_TYPE (varop),
7090 mask)));
7091 }
7092
7093 t = build (code, type,
7094 (constopnum == 0) ? newconst : varop,
7095 (constopnum == 1) ? newconst : varop);
7096 return t;
7097 }
7098 }
7099 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
7100 {
7101 if (POINTER_TYPE_P (TREE_TYPE (varop))
7102 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
7103 && (code == EQ_EXPR || code == NE_EXPR)))
7104 {
7105 tree newconst
7106 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
7107 constop, TREE_OPERAND (varop, 1)));
7108
7109 /* Do not overwrite the current varop to be a predecrement,
7110 create a new node so that we won't confuse our caller who
7111 might create trees and throw them away, reusing the
7112 arguments that they passed to build. This shows up in
7113 the THEN or ELSE parts of ?: being postdecrements. */
7114 varop = build (PREDECREMENT_EXPR, TREE_TYPE (varop),
7115 TREE_OPERAND (varop, 0),
7116 TREE_OPERAND (varop, 1));
7117
7118 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7119 && DECL_BIT_FIELD(TREE_OPERAND
7120 (TREE_OPERAND (varop, 0), 1)))
7121 {
7122 int size
7123 = TREE_INT_CST_LOW (DECL_SIZE
7124 (TREE_OPERAND
7125 (TREE_OPERAND (varop, 0), 1)));
7126 tree mask, unsigned_type;
7127 unsigned int precision;
7128 tree folded_compare;
7129
7130 if (constopnum == 0)
7131 folded_compare = fold (build (code, type, constop,
7132 TREE_OPERAND (varop, 0)));
7133 else
7134 folded_compare = fold (build (code, type,
7135 TREE_OPERAND (varop, 0),
7136 constop));
7137 if (integer_zerop (folded_compare)
7138 || integer_onep (folded_compare))
7139 return omit_one_operand (type, folded_compare, varop);
7140
7141 unsigned_type = (*lang_hooks.types.type_for_size)(size, 1);
7142 precision = TYPE_PRECISION (unsigned_type);
7143 mask = build_int_2 (~0, ~0);
7144 TREE_TYPE (mask) = TREE_TYPE (varop);
7145 force_fit_type (mask, 0);
7146 mask = const_binop (RSHIFT_EXPR, mask,
7147 size_int (precision - size), 0);
7148 newconst = fold (build (BIT_AND_EXPR,
7149 TREE_TYPE (varop), newconst,
7150 convert (TREE_TYPE (varop),
7151 mask)));
7152 }
7153
7154 t = build (code, type,
7155 (constopnum == 0) ? newconst : varop,
7156 (constopnum == 1) ? newconst : varop);
7157 return t;
7158 }
7159 }
7160 }
7161
7162 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
7163 This transformation affects the cases which are handled in later
7164 optimizations involving comparisons with non-negative constants. */
7165 if (TREE_CODE (arg1) == INTEGER_CST
7166 && TREE_CODE (arg0) != INTEGER_CST
7167 && tree_int_cst_sgn (arg1) > 0)
7168 {
7169 switch (code)
7170 {
7171 case GE_EXPR:
7172 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7173 return fold (build (GT_EXPR, type, arg0, arg1));
7174
7175 case LT_EXPR:
7176 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7177 return fold (build (LE_EXPR, type, arg0, arg1));
7178
7179 default:
7180 break;
7181 }
7182 }
7183
7184 /* Comparisons with the highest or lowest possible integer of
7185 the specified size will have known values. */
7186 {
7187 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
7188
7189 if (TREE_CODE (arg1) == INTEGER_CST
7190 && ! TREE_CONSTANT_OVERFLOW (arg1)
7191 && width <= HOST_BITS_PER_WIDE_INT
7192 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
7193 || POINTER_TYPE_P (TREE_TYPE (arg1))))
7194 {
7195 unsigned HOST_WIDE_INT signed_max;
7196 unsigned HOST_WIDE_INT max, min;
7197
7198 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
7199
7200 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7201 {
7202 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
7203 min = 0;
7204 }
7205 else
7206 {
7207 max = signed_max;
7208 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
7209 }
7210
7211 if (TREE_INT_CST_HIGH (arg1) == 0
7212 && TREE_INT_CST_LOW (arg1) == max)
7213 switch (code)
7214 {
7215 case GT_EXPR:
7216 return omit_one_operand (type,
7217 convert (type, integer_zero_node),
7218 arg0);
7219 case GE_EXPR:
7220 return fold (build (EQ_EXPR, type, arg0, arg1));
7221
7222 case LE_EXPR:
7223 return omit_one_operand (type,
7224 convert (type, integer_one_node),
7225 arg0);
7226 case LT_EXPR:
7227 return fold (build (NE_EXPR, type, arg0, arg1));
7228
7229 /* The GE_EXPR and LT_EXPR cases above are not normally
7230 reached because of previous transformations. */
7231
7232 default:
7233 break;
7234 }
7235 else if (TREE_INT_CST_HIGH (arg1) == 0
7236 && TREE_INT_CST_LOW (arg1) == max - 1)
7237 switch (code)
7238 {
7239 case GT_EXPR:
7240 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7241 return fold (build (EQ_EXPR, type, arg0, arg1));
7242 case LE_EXPR:
7243 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7244 return fold (build (NE_EXPR, type, arg0, arg1));
7245 default:
7246 break;
7247 }
7248 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7249 && TREE_INT_CST_LOW (arg1) == min)
7250 switch (code)
7251 {
7252 case LT_EXPR:
7253 return omit_one_operand (type,
7254 convert (type, integer_zero_node),
7255 arg0);
7256 case LE_EXPR:
7257 return fold (build (EQ_EXPR, type, arg0, arg1));
7258
7259 case GE_EXPR:
7260 return omit_one_operand (type,
7261 convert (type, integer_one_node),
7262 arg0);
7263 case GT_EXPR:
7264 return fold (build (NE_EXPR, type, arg0, arg1));
7265
7266 default:
7267 break;
7268 }
7269 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7270 && TREE_INT_CST_LOW (arg1) == min + 1)
7271 switch (code)
7272 {
7273 case GE_EXPR:
7274 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7275 return fold (build (NE_EXPR, type, arg0, arg1));
7276 case LT_EXPR:
7277 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7278 return fold (build (EQ_EXPR, type, arg0, arg1));
7279 default:
7280 break;
7281 }
7282
7283 else if (TREE_INT_CST_HIGH (arg1) == 0
7284 && TREE_INT_CST_LOW (arg1) == signed_max
7285 && TREE_UNSIGNED (TREE_TYPE (arg1))
7286 /* signed_type does not work on pointer types. */
7287 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
7288 {
7289 /* The following case also applies to X < signed_max+1
7290 and X >= signed_max+1 because previous transformations. */
7291 if (code == LE_EXPR || code == GT_EXPR)
7292 {
7293 tree st0, st1;
7294 st0 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg0));
7295 st1 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg1));
7296 return fold
7297 (build (code == LE_EXPR ? GE_EXPR: LT_EXPR,
7298 type, convert (st0, arg0),
7299 convert (st1, integer_zero_node)));
7300 }
7301 }
7302 }
7303 }
7304
7305 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
7306 a MINUS_EXPR of a constant, we can convert it into a comparison with
7307 a revised constant as long as no overflow occurs. */
7308 if ((code == EQ_EXPR || code == NE_EXPR)
7309 && TREE_CODE (arg1) == INTEGER_CST
7310 && (TREE_CODE (arg0) == PLUS_EXPR
7311 || TREE_CODE (arg0) == MINUS_EXPR)
7312 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7313 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7314 ? MINUS_EXPR : PLUS_EXPR,
7315 arg1, TREE_OPERAND (arg0, 1), 0))
7316 && ! TREE_CONSTANT_OVERFLOW (tem))
7317 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7318
7319 /* Similarly for a NEGATE_EXPR. */
7320 else if ((code == EQ_EXPR || code == NE_EXPR)
7321 && TREE_CODE (arg0) == NEGATE_EXPR
7322 && TREE_CODE (arg1) == INTEGER_CST
7323 && 0 != (tem = negate_expr (arg1))
7324 && TREE_CODE (tem) == INTEGER_CST
7325 && ! TREE_CONSTANT_OVERFLOW (tem))
7326 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7327
7328 /* If we have X - Y == 0, we can convert that to X == Y and similarly
7329 for !=. Don't do this for ordered comparisons due to overflow. */
7330 else if ((code == NE_EXPR || code == EQ_EXPR)
7331 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
7332 return fold (build (code, type,
7333 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
7334
7335 /* If we are widening one operand of an integer comparison,
7336 see if the other operand is similarly being widened. Perhaps we
7337 can do the comparison in the narrower type. */
7338 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
7339 && TREE_CODE (arg0) == NOP_EXPR
7340 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
7341 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
7342 && (TREE_TYPE (t1) == TREE_TYPE (tem)
7343 || (TREE_CODE (t1) == INTEGER_CST
7344 && int_fits_type_p (t1, TREE_TYPE (tem)))))
7345 return fold (build (code, type, tem, convert (TREE_TYPE (tem), t1)));
7346
7347 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
7348 constant, we can simplify it. */
7349 else if (TREE_CODE (arg1) == INTEGER_CST
7350 && (TREE_CODE (arg0) == MIN_EXPR
7351 || TREE_CODE (arg0) == MAX_EXPR)
7352 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7353 return optimize_minmax_comparison (t);
7354
7355 /* If we are comparing an ABS_EXPR with a constant, we can
7356 convert all the cases into explicit comparisons, but they may
7357 well not be faster than doing the ABS and one comparison.
7358 But ABS (X) <= C is a range comparison, which becomes a subtraction
7359 and a comparison, and is probably faster. */
7360 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7361 && TREE_CODE (arg0) == ABS_EXPR
7362 && ! TREE_SIDE_EFFECTS (arg0)
7363 && (0 != (tem = negate_expr (arg1)))
7364 && TREE_CODE (tem) == INTEGER_CST
7365 && ! TREE_CONSTANT_OVERFLOW (tem))
7366 return fold (build (TRUTH_ANDIF_EXPR, type,
7367 build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
7368 build (LE_EXPR, type,
7369 TREE_OPERAND (arg0, 0), arg1)));
7370
7371 /* If this is an EQ or NE comparison with zero and ARG0 is
7372 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
7373 two operations, but the latter can be done in one less insn
7374 on machines that have only two-operand insns or on which a
7375 constant cannot be the first operand. */
7376 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
7377 && TREE_CODE (arg0) == BIT_AND_EXPR)
7378 {
7379 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
7380 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
7381 return
7382 fold (build (code, type,
7383 build (BIT_AND_EXPR, TREE_TYPE (arg0),
7384 build (RSHIFT_EXPR,
7385 TREE_TYPE (TREE_OPERAND (arg0, 0)),
7386 TREE_OPERAND (arg0, 1),
7387 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
7388 convert (TREE_TYPE (arg0),
7389 integer_one_node)),
7390 arg1));
7391 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
7392 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
7393 return
7394 fold (build (code, type,
7395 build (BIT_AND_EXPR, TREE_TYPE (arg0),
7396 build (RSHIFT_EXPR,
7397 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7398 TREE_OPERAND (arg0, 0),
7399 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
7400 convert (TREE_TYPE (arg0),
7401 integer_one_node)),
7402 arg1));
7403 }
7404
7405 /* If this is an NE or EQ comparison of zero against the result of a
7406 signed MOD operation whose second operand is a power of 2, make
7407 the MOD operation unsigned since it is simpler and equivalent. */
7408 if ((code == NE_EXPR || code == EQ_EXPR)
7409 && integer_zerop (arg1)
7410 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
7411 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
7412 || TREE_CODE (arg0) == CEIL_MOD_EXPR
7413 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
7414 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
7415 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7416 {
7417 tree newtype = (*lang_hooks.types.unsigned_type) (TREE_TYPE (arg0));
7418 tree newmod = build (TREE_CODE (arg0), newtype,
7419 convert (newtype, TREE_OPERAND (arg0, 0)),
7420 convert (newtype, TREE_OPERAND (arg0, 1)));
7421
7422 return build (code, type, newmod, convert (newtype, arg1));
7423 }
7424
7425 /* If this is an NE comparison of zero with an AND of one, remove the
7426 comparison since the AND will give the correct value. */
7427 if (code == NE_EXPR && integer_zerop (arg1)
7428 && TREE_CODE (arg0) == BIT_AND_EXPR
7429 && integer_onep (TREE_OPERAND (arg0, 1)))
7430 return convert (type, arg0);
7431
7432 /* If we have (A & C) == C where C is a power of 2, convert this into
7433 (A & C) != 0. Similarly for NE_EXPR. */
7434 if ((code == EQ_EXPR || code == NE_EXPR)
7435 && TREE_CODE (arg0) == BIT_AND_EXPR
7436 && integer_pow2p (TREE_OPERAND (arg0, 1))
7437 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
7438 return fold (build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
7439 arg0, integer_zero_node));
7440
7441 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
7442 2, then fold the expression into shifts and logical operations. */
7443 tem = fold_single_bit_test (code, arg0, arg1, type);
7444 if (tem)
7445 return tem;
7446
7447 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
7448 Similarly for NE_EXPR. */
7449 if ((code == EQ_EXPR || code == NE_EXPR)
7450 && TREE_CODE (arg0) == BIT_AND_EXPR
7451 && TREE_CODE (arg1) == INTEGER_CST
7452 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7453 {
7454 tree dandnotc
7455 = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
7456 arg1, build1 (BIT_NOT_EXPR,
7457 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7458 TREE_OPERAND (arg0, 1))));
7459 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7460 if (integer_nonzerop (dandnotc))
7461 return omit_one_operand (type, rslt, arg0);
7462 }
7463
7464 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
7465 Similarly for NE_EXPR. */
7466 if ((code == EQ_EXPR || code == NE_EXPR)
7467 && TREE_CODE (arg0) == BIT_IOR_EXPR
7468 && TREE_CODE (arg1) == INTEGER_CST
7469 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7470 {
7471 tree candnotd
7472 = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
7473 TREE_OPERAND (arg0, 1),
7474 build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1)));
7475 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7476 if (integer_nonzerop (candnotd))
7477 return omit_one_operand (type, rslt, arg0);
7478 }
7479
7480 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
7481 and similarly for >= into !=. */
7482 if ((code == LT_EXPR || code == GE_EXPR)
7483 && TREE_UNSIGNED (TREE_TYPE (arg0))
7484 && TREE_CODE (arg1) == LSHIFT_EXPR
7485 && integer_onep (TREE_OPERAND (arg1, 0)))
7486 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7487 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7488 TREE_OPERAND (arg1, 1)),
7489 convert (TREE_TYPE (arg0), integer_zero_node));
7490
7491 else if ((code == LT_EXPR || code == GE_EXPR)
7492 && TREE_UNSIGNED (TREE_TYPE (arg0))
7493 && (TREE_CODE (arg1) == NOP_EXPR
7494 || TREE_CODE (arg1) == CONVERT_EXPR)
7495 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
7496 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
7497 return
7498 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7499 convert (TREE_TYPE (arg0),
7500 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7501 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
7502 convert (TREE_TYPE (arg0), integer_zero_node));
7503
7504 /* Simplify comparison of something with itself. (For IEEE
7505 floating-point, we can only do some of these simplifications.) */
7506 if (operand_equal_p (arg0, arg1, 0))
7507 {
7508 switch (code)
7509 {
7510 case EQ_EXPR:
7511 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7512 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7513 return constant_boolean_node (1, type);
7514 break;
7515
7516 case GE_EXPR:
7517 case LE_EXPR:
7518 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7519 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7520 return constant_boolean_node (1, type);
7521 return fold (build (EQ_EXPR, type, arg0, arg1));
7522
7523 case NE_EXPR:
7524 /* For NE, we can only do this simplification if integer
7525 or we don't honor IEEE floating point NaNs. */
7526 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
7527 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7528 break;
7529 /* ... fall through ... */
7530 case GT_EXPR:
7531 case LT_EXPR:
7532 return constant_boolean_node (0, type);
7533 default:
7534 abort ();
7535 }
7536 }
7537
7538 /* If we are comparing an expression that just has comparisons
7539 of two integer values, arithmetic expressions of those comparisons,
7540 and constants, we can simplify it. There are only three cases
7541 to check: the two values can either be equal, the first can be
7542 greater, or the second can be greater. Fold the expression for
7543 those three values. Since each value must be 0 or 1, we have
7544 eight possibilities, each of which corresponds to the constant 0
7545 or 1 or one of the six possible comparisons.
7546
7547 This handles common cases like (a > b) == 0 but also handles
7548 expressions like ((x > y) - (y > x)) > 0, which supposedly
7549 occur in macroized code. */
7550
7551 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
7552 {
7553 tree cval1 = 0, cval2 = 0;
7554 int save_p = 0;
7555
7556 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
7557 /* Don't handle degenerate cases here; they should already
7558 have been handled anyway. */
7559 && cval1 != 0 && cval2 != 0
7560 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
7561 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
7562 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
7563 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
7564 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
7565 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
7566 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
7567 {
7568 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
7569 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
7570
7571 /* We can't just pass T to eval_subst in case cval1 or cval2
7572 was the same as ARG1. */
7573
7574 tree high_result
7575 = fold (build (code, type,
7576 eval_subst (arg0, cval1, maxval, cval2, minval),
7577 arg1));
7578 tree equal_result
7579 = fold (build (code, type,
7580 eval_subst (arg0, cval1, maxval, cval2, maxval),
7581 arg1));
7582 tree low_result
7583 = fold (build (code, type,
7584 eval_subst (arg0, cval1, minval, cval2, maxval),
7585 arg1));
7586
7587 /* All three of these results should be 0 or 1. Confirm they
7588 are. Then use those values to select the proper code
7589 to use. */
7590
7591 if ((integer_zerop (high_result)
7592 || integer_onep (high_result))
7593 && (integer_zerop (equal_result)
7594 || integer_onep (equal_result))
7595 && (integer_zerop (low_result)
7596 || integer_onep (low_result)))
7597 {
7598 /* Make a 3-bit mask with the high-order bit being the
7599 value for `>', the next for '=', and the low for '<'. */
7600 switch ((integer_onep (high_result) * 4)
7601 + (integer_onep (equal_result) * 2)
7602 + integer_onep (low_result))
7603 {
7604 case 0:
7605 /* Always false. */
7606 return omit_one_operand (type, integer_zero_node, arg0);
7607 case 1:
7608 code = LT_EXPR;
7609 break;
7610 case 2:
7611 code = EQ_EXPR;
7612 break;
7613 case 3:
7614 code = LE_EXPR;
7615 break;
7616 case 4:
7617 code = GT_EXPR;
7618 break;
7619 case 5:
7620 code = NE_EXPR;
7621 break;
7622 case 6:
7623 code = GE_EXPR;
7624 break;
7625 case 7:
7626 /* Always true. */
7627 return omit_one_operand (type, integer_one_node, arg0);
7628 }
7629
7630 t = build (code, type, cval1, cval2);
7631 if (save_p)
7632 return save_expr (t);
7633 else
7634 return fold (t);
7635 }
7636 }
7637 }
7638
7639 /* If this is a comparison of a field, we may be able to simplify it. */
7640 if (((TREE_CODE (arg0) == COMPONENT_REF
7641 && (*lang_hooks.can_use_bit_fields_p) ())
7642 || TREE_CODE (arg0) == BIT_FIELD_REF)
7643 && (code == EQ_EXPR || code == NE_EXPR)
7644 /* Handle the constant case even without -O
7645 to make sure the warnings are given. */
7646 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
7647 {
7648 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
7649 return t1 ? t1 : t;
7650 }
7651
7652 /* If this is a comparison of complex values and either or both sides
7653 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
7654 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
7655 This may prevent needless evaluations. */
7656 if ((code == EQ_EXPR || code == NE_EXPR)
7657 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
7658 && (TREE_CODE (arg0) == COMPLEX_EXPR
7659 || TREE_CODE (arg1) == COMPLEX_EXPR
7660 || TREE_CODE (arg0) == COMPLEX_CST
7661 || TREE_CODE (arg1) == COMPLEX_CST))
7662 {
7663 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
7664 tree real0, imag0, real1, imag1;
7665
7666 arg0 = save_expr (arg0);
7667 arg1 = save_expr (arg1);
7668 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
7669 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
7670 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
7671 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
7672
7673 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
7674 : TRUTH_ORIF_EXPR),
7675 type,
7676 fold (build (code, type, real0, real1)),
7677 fold (build (code, type, imag0, imag1))));
7678 }
7679
7680 /* Optimize comparisons of strlen vs zero to a compare of the
7681 first character of the string vs zero. To wit,
7682 strlen(ptr) == 0 => *ptr == 0
7683 strlen(ptr) != 0 => *ptr != 0
7684 Other cases should reduce to one of these two (or a constant)
7685 due to the return value of strlen being unsigned. */
7686 if ((code == EQ_EXPR || code == NE_EXPR)
7687 && integer_zerop (arg1)
7688 && TREE_CODE (arg0) == CALL_EXPR)
7689 {
7690 tree fndecl = get_callee_fndecl (arg0);
7691 tree arglist;
7692
7693 if (fndecl
7694 && DECL_BUILT_IN (fndecl)
7695 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
7696 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
7697 && (arglist = TREE_OPERAND (arg0, 1))
7698 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
7699 && ! TREE_CHAIN (arglist))
7700 return fold (build (code, type,
7701 build1 (INDIRECT_REF, char_type_node,
7702 TREE_VALUE(arglist)),
7703 integer_zero_node));
7704 }
7705
7706 /* From here on, the only cases we handle are when the result is
7707 known to be a constant.
7708
7709 To compute GT, swap the arguments and do LT.
7710 To compute GE, do LT and invert the result.
7711 To compute LE, swap the arguments, do LT and invert the result.
7712 To compute NE, do EQ and invert the result.
7713
7714 Therefore, the code below must handle only EQ and LT. */
7715
7716 if (code == LE_EXPR || code == GT_EXPR)
7717 {
7718 tem = arg0, arg0 = arg1, arg1 = tem;
7719 code = swap_tree_comparison (code);
7720 }
7721
7722 /* Note that it is safe to invert for real values here because we
7723 will check below in the one case that it matters. */
7724
7725 t1 = NULL_TREE;
7726 invert = 0;
7727 if (code == NE_EXPR || code == GE_EXPR)
7728 {
7729 invert = 1;
7730 code = invert_tree_comparison (code);
7731 }
7732
7733 /* Compute a result for LT or EQ if args permit;
7734 otherwise return T. */
7735 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
7736 {
7737 if (code == EQ_EXPR)
7738 t1 = build_int_2 (tree_int_cst_equal (arg0, arg1), 0);
7739 else
7740 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
7741 ? INT_CST_LT_UNSIGNED (arg0, arg1)
7742 : INT_CST_LT (arg0, arg1)),
7743 0);
7744 }
7745
7746 #if 0 /* This is no longer useful, but breaks some real code. */
7747 /* Assume a nonexplicit constant cannot equal an explicit one,
7748 since such code would be undefined anyway.
7749 Exception: on sysvr4, using #pragma weak,
7750 a label can come out as 0. */
7751 else if (TREE_CODE (arg1) == INTEGER_CST
7752 && !integer_zerop (arg1)
7753 && TREE_CONSTANT (arg0)
7754 && TREE_CODE (arg0) == ADDR_EXPR
7755 && code == EQ_EXPR)
7756 t1 = build_int_2 (0, 0);
7757 #endif
7758 /* Two real constants can be compared explicitly. */
7759 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
7760 {
7761 /* If either operand is a NaN, the result is false with two
7762 exceptions: First, an NE_EXPR is true on NaNs, but that case
7763 is already handled correctly since we will be inverting the
7764 result for NE_EXPR. Second, if we had inverted a LE_EXPR
7765 or a GE_EXPR into a LT_EXPR, we must return true so that it
7766 will be inverted into false. */
7767
7768 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
7769 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
7770 t1 = build_int_2 (invert && code == LT_EXPR, 0);
7771
7772 else if (code == EQ_EXPR)
7773 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
7774 TREE_REAL_CST (arg1)),
7775 0);
7776 else
7777 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
7778 TREE_REAL_CST (arg1)),
7779 0);
7780 }
7781
7782 if (t1 == NULL_TREE)
7783 return t;
7784
7785 if (invert)
7786 TREE_INT_CST_LOW (t1) ^= 1;
7787
7788 TREE_TYPE (t1) = type;
7789 if (TREE_CODE (type) == BOOLEAN_TYPE)
7790 return (*lang_hooks.truthvalue_conversion) (t1);
7791 return t1;
7792
7793 case COND_EXPR:
7794 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
7795 so all simple results must be passed through pedantic_non_lvalue. */
7796 if (TREE_CODE (arg0) == INTEGER_CST)
7797 return pedantic_non_lvalue
7798 (TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
7799 else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
7800 return pedantic_omit_one_operand (type, arg1, arg0);
7801
7802 /* If we have A op B ? A : C, we may be able to convert this to a
7803 simpler expression, depending on the operation and the values
7804 of B and C. Signed zeros prevent all of these transformations,
7805 for reasons given above each one. */
7806
7807 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
7808 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
7809 arg1, TREE_OPERAND (arg0, 1))
7810 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
7811 {
7812 tree arg2 = TREE_OPERAND (t, 2);
7813 enum tree_code comp_code = TREE_CODE (arg0);
7814
7815 STRIP_NOPS (arg2);
7816
7817 /* If we have A op 0 ? A : -A, consider applying the following
7818 transformations:
7819
7820 A == 0? A : -A same as -A
7821 A != 0? A : -A same as A
7822 A >= 0? A : -A same as abs (A)
7823 A > 0? A : -A same as abs (A)
7824 A <= 0? A : -A same as -abs (A)
7825 A < 0? A : -A same as -abs (A)
7826
7827 None of these transformations work for modes with signed
7828 zeros. If A is +/-0, the first two transformations will
7829 change the sign of the result (from +0 to -0, or vice
7830 versa). The last four will fix the sign of the result,
7831 even though the original expressions could be positive or
7832 negative, depending on the sign of A.
7833
7834 Note that all these transformations are correct if A is
7835 NaN, since the two alternatives (A and -A) are also NaNs. */
7836 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
7837 ? real_zerop (TREE_OPERAND (arg0, 1))
7838 : integer_zerop (TREE_OPERAND (arg0, 1)))
7839 && TREE_CODE (arg2) == NEGATE_EXPR
7840 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
7841 switch (comp_code)
7842 {
7843 case EQ_EXPR:
7844 return
7845 pedantic_non_lvalue
7846 (convert (type,
7847 negate_expr
7848 (convert (TREE_TYPE (TREE_OPERAND (t, 1)),
7849 arg1))));
7850 case NE_EXPR:
7851 return pedantic_non_lvalue (convert (type, arg1));
7852 case GE_EXPR:
7853 case GT_EXPR:
7854 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7855 arg1 = convert ((*lang_hooks.types.signed_type)
7856 (TREE_TYPE (arg1)), arg1);
7857 return pedantic_non_lvalue
7858 (convert (type, fold (build1 (ABS_EXPR,
7859 TREE_TYPE (arg1), arg1))));
7860 case LE_EXPR:
7861 case LT_EXPR:
7862 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7863 arg1 = convert ((lang_hooks.types.signed_type)
7864 (TREE_TYPE (arg1)), arg1);
7865 return pedantic_non_lvalue
7866 (negate_expr (convert (type,
7867 fold (build1 (ABS_EXPR,
7868 TREE_TYPE (arg1),
7869 arg1)))));
7870 default:
7871 abort ();
7872 }
7873
7874 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
7875 A == 0 ? A : 0 is always 0 unless A is -0. Note that
7876 both transformations are correct when A is NaN: A != 0
7877 is then true, and A == 0 is false. */
7878
7879 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
7880 {
7881 if (comp_code == NE_EXPR)
7882 return pedantic_non_lvalue (convert (type, arg1));
7883 else if (comp_code == EQ_EXPR)
7884 return pedantic_non_lvalue (convert (type, integer_zero_node));
7885 }
7886
7887 /* Try some transformations of A op B ? A : B.
7888
7889 A == B? A : B same as B
7890 A != B? A : B same as A
7891 A >= B? A : B same as max (A, B)
7892 A > B? A : B same as max (B, A)
7893 A <= B? A : B same as min (A, B)
7894 A < B? A : B same as min (B, A)
7895
7896 As above, these transformations don't work in the presence
7897 of signed zeros. For example, if A and B are zeros of
7898 opposite sign, the first two transformations will change
7899 the sign of the result. In the last four, the original
7900 expressions give different results for (A=+0, B=-0) and
7901 (A=-0, B=+0), but the transformed expressions do not.
7902
7903 The first two transformations are correct if either A or B
7904 is a NaN. In the first transformation, the condition will
7905 be false, and B will indeed be chosen. In the case of the
7906 second transformation, the condition A != B will be true,
7907 and A will be chosen.
7908
7909 The conversions to max() and min() are not correct if B is
7910 a number and A is not. The conditions in the original
7911 expressions will be false, so all four give B. The min()
7912 and max() versions would give a NaN instead. */
7913 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
7914 arg2, TREE_OPERAND (arg0, 0)))
7915 {
7916 tree comp_op0 = TREE_OPERAND (arg0, 0);
7917 tree comp_op1 = TREE_OPERAND (arg0, 1);
7918 tree comp_type = TREE_TYPE (comp_op0);
7919
7920 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
7921 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
7922 {
7923 comp_type = type;
7924 comp_op0 = arg1;
7925 comp_op1 = arg2;
7926 }
7927
7928 switch (comp_code)
7929 {
7930 case EQ_EXPR:
7931 return pedantic_non_lvalue (convert (type, arg2));
7932 case NE_EXPR:
7933 return pedantic_non_lvalue (convert (type, arg1));
7934 case LE_EXPR:
7935 case LT_EXPR:
7936 /* In C++ a ?: expression can be an lvalue, so put the
7937 operand which will be used if they are equal first
7938 so that we can convert this back to the
7939 corresponding COND_EXPR. */
7940 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
7941 return pedantic_non_lvalue
7942 (convert (type, fold (build (MIN_EXPR, comp_type,
7943 (comp_code == LE_EXPR
7944 ? comp_op0 : comp_op1),
7945 (comp_code == LE_EXPR
7946 ? comp_op1 : comp_op0)))));
7947 break;
7948 case GE_EXPR:
7949 case GT_EXPR:
7950 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
7951 return pedantic_non_lvalue
7952 (convert (type, fold (build (MAX_EXPR, comp_type,
7953 (comp_code == GE_EXPR
7954 ? comp_op0 : comp_op1),
7955 (comp_code == GE_EXPR
7956 ? comp_op1 : comp_op0)))));
7957 break;
7958 default:
7959 abort ();
7960 }
7961 }
7962
7963 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
7964 we might still be able to simplify this. For example,
7965 if C1 is one less or one more than C2, this might have started
7966 out as a MIN or MAX and been transformed by this function.
7967 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
7968
7969 if (INTEGRAL_TYPE_P (type)
7970 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7971 && TREE_CODE (arg2) == INTEGER_CST)
7972 switch (comp_code)
7973 {
7974 case EQ_EXPR:
7975 /* We can replace A with C1 in this case. */
7976 arg1 = convert (type, TREE_OPERAND (arg0, 1));
7977 return fold (build (code, type, TREE_OPERAND (t, 0), arg1,
7978 TREE_OPERAND (t, 2)));
7979
7980 case LT_EXPR:
7981 /* If C1 is C2 + 1, this is min(A, C2). */
7982 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
7983 && operand_equal_p (TREE_OPERAND (arg0, 1),
7984 const_binop (PLUS_EXPR, arg2,
7985 integer_one_node, 0), 1))
7986 return pedantic_non_lvalue
7987 (fold (build (MIN_EXPR, type, arg1, arg2)));
7988 break;
7989
7990 case LE_EXPR:
7991 /* If C1 is C2 - 1, this is min(A, C2). */
7992 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
7993 && operand_equal_p (TREE_OPERAND (arg0, 1),
7994 const_binop (MINUS_EXPR, arg2,
7995 integer_one_node, 0), 1))
7996 return pedantic_non_lvalue
7997 (fold (build (MIN_EXPR, type, arg1, arg2)));
7998 break;
7999
8000 case GT_EXPR:
8001 /* If C1 is C2 - 1, this is max(A, C2). */
8002 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
8003 && operand_equal_p (TREE_OPERAND (arg0, 1),
8004 const_binop (MINUS_EXPR, arg2,
8005 integer_one_node, 0), 1))
8006 return pedantic_non_lvalue
8007 (fold (build (MAX_EXPR, type, arg1, arg2)));
8008 break;
8009
8010 case GE_EXPR:
8011 /* If C1 is C2 + 1, this is max(A, C2). */
8012 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
8013 && operand_equal_p (TREE_OPERAND (arg0, 1),
8014 const_binop (PLUS_EXPR, arg2,
8015 integer_one_node, 0), 1))
8016 return pedantic_non_lvalue
8017 (fold (build (MAX_EXPR, type, arg1, arg2)));
8018 break;
8019 case NE_EXPR:
8020 break;
8021 default:
8022 abort ();
8023 }
8024 }
8025
8026 /* If the second operand is simpler than the third, swap them
8027 since that produces better jump optimization results. */
8028 if (tree_swap_operands_p (TREE_OPERAND (t, 1), TREE_OPERAND (t, 2)))
8029 {
8030 /* See if this can be inverted. If it can't, possibly because
8031 it was a floating-point inequality comparison, don't do
8032 anything. */
8033 tem = invert_truthvalue (arg0);
8034
8035 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8036 return fold (build (code, type, tem,
8037 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
8038 }
8039
8040 /* Convert A ? 1 : 0 to simply A. */
8041 if (integer_onep (TREE_OPERAND (t, 1))
8042 && integer_zerop (TREE_OPERAND (t, 2))
8043 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
8044 call to fold will try to move the conversion inside
8045 a COND, which will recurse. In that case, the COND_EXPR
8046 is probably the best choice, so leave it alone. */
8047 && type == TREE_TYPE (arg0))
8048 return pedantic_non_lvalue (arg0);
8049
8050 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
8051 over COND_EXPR in cases such as floating point comparisons. */
8052 if (integer_zerop (TREE_OPERAND (t, 1))
8053 && integer_onep (TREE_OPERAND (t, 2))
8054 && truth_value_p (TREE_CODE (arg0)))
8055 return pedantic_non_lvalue (convert (type,
8056 invert_truthvalue (arg0)));
8057
8058 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
8059 operation is simply A & 2. */
8060
8061 if (integer_zerop (TREE_OPERAND (t, 2))
8062 && TREE_CODE (arg0) == NE_EXPR
8063 && integer_zerop (TREE_OPERAND (arg0, 1))
8064 && integer_pow2p (arg1)
8065 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
8066 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
8067 arg1, 1))
8068 return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));
8069
8070 /* Convert A ? B : 0 into A && B if A and B are truth values. */
8071 if (integer_zerop (TREE_OPERAND (t, 2))
8072 && truth_value_p (TREE_CODE (arg0))
8073 && truth_value_p (TREE_CODE (arg1)))
8074 return pedantic_non_lvalue (fold (build (TRUTH_ANDIF_EXPR, type,
8075 arg0, arg1)));
8076
8077 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
8078 if (integer_onep (TREE_OPERAND (t, 2))
8079 && truth_value_p (TREE_CODE (arg0))
8080 && truth_value_p (TREE_CODE (arg1)))
8081 {
8082 /* Only perform transformation if ARG0 is easily inverted. */
8083 tem = invert_truthvalue (arg0);
8084 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8085 return pedantic_non_lvalue (fold (build (TRUTH_ORIF_EXPR, type,
8086 tem, arg1)));
8087 }
8088
8089 return t;
8090
8091 case COMPOUND_EXPR:
8092 /* When pedantic, a compound expression can be neither an lvalue
8093 nor an integer constant expression. */
8094 if (TREE_SIDE_EFFECTS (arg0) || pedantic)
8095 return t;
8096 /* Don't let (0, 0) be null pointer constant. */
8097 if (integer_zerop (arg1))
8098 return build1 (NOP_EXPR, type, arg1);
8099 return convert (type, arg1);
8100
8101 case COMPLEX_EXPR:
8102 if (wins)
8103 return build_complex (type, arg0, arg1);
8104 return t;
8105
8106 case REALPART_EXPR:
8107 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8108 return t;
8109 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8110 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8111 TREE_OPERAND (arg0, 1));
8112 else if (TREE_CODE (arg0) == COMPLEX_CST)
8113 return TREE_REALPART (arg0);
8114 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8115 return fold (build (TREE_CODE (arg0), type,
8116 fold (build1 (REALPART_EXPR, type,
8117 TREE_OPERAND (arg0, 0))),
8118 fold (build1 (REALPART_EXPR,
8119 type, TREE_OPERAND (arg0, 1)))));
8120 return t;
8121
8122 case IMAGPART_EXPR:
8123 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8124 return convert (type, integer_zero_node);
8125 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8126 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8127 TREE_OPERAND (arg0, 0));
8128 else if (TREE_CODE (arg0) == COMPLEX_CST)
8129 return TREE_IMAGPART (arg0);
8130 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8131 return fold (build (TREE_CODE (arg0), type,
8132 fold (build1 (IMAGPART_EXPR, type,
8133 TREE_OPERAND (arg0, 0))),
8134 fold (build1 (IMAGPART_EXPR, type,
8135 TREE_OPERAND (arg0, 1)))));
8136 return t;
8137
8138 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
8139 appropriate. */
8140 case CLEANUP_POINT_EXPR:
8141 if (! has_cleanups (arg0))
8142 return TREE_OPERAND (t, 0);
8143
8144 {
8145 enum tree_code code0 = TREE_CODE (arg0);
8146 int kind0 = TREE_CODE_CLASS (code0);
8147 tree arg00 = TREE_OPERAND (arg0, 0);
8148 tree arg01;
8149
8150 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
8151 return fold (build1 (code0, type,
8152 fold (build1 (CLEANUP_POINT_EXPR,
8153 TREE_TYPE (arg00), arg00))));
8154
8155 if (kind0 == '<' || kind0 == '2'
8156 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
8157 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
8158 || code0 == TRUTH_XOR_EXPR)
8159 {
8160 arg01 = TREE_OPERAND (arg0, 1);
8161
8162 if (TREE_CONSTANT (arg00)
8163 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
8164 && ! has_cleanups (arg00)))
8165 return fold (build (code0, type, arg00,
8166 fold (build1 (CLEANUP_POINT_EXPR,
8167 TREE_TYPE (arg01), arg01))));
8168
8169 if (TREE_CONSTANT (arg01))
8170 return fold (build (code0, type,
8171 fold (build1 (CLEANUP_POINT_EXPR,
8172 TREE_TYPE (arg00), arg00)),
8173 arg01));
8174 }
8175
8176 return t;
8177 }
8178
8179 case CALL_EXPR:
8180 /* Check for a built-in function. */
8181 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
8182 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (expr, 0), 0))
8183 == FUNCTION_DECL)
8184 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
8185 {
8186 tree tmp = fold_builtin (expr);
8187 if (tmp)
8188 return tmp;
8189 }
8190 return t;
8191
8192 default:
8193 return t;
8194 } /* switch (code) */
8195 }
8196
8197 #ifdef ENABLE_FOLD_CHECKING
8198 #undef fold
8199
8200 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
8201 static void fold_check_failed (tree, tree);
8202 void print_fold_checksum (tree);
8203
8204 /* When --enable-checking=fold, compute a digest of expr before
8205 and after actual fold call to see if fold did not accidentally
8206 change original expr. */
8207
8208 tree
8209 fold (tree expr)
8210 {
8211 tree ret;
8212 struct md5_ctx ctx;
8213 unsigned char checksum_before[16], checksum_after[16];
8214 htab_t ht;
8215
8216 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8217 md5_init_ctx (&ctx);
8218 fold_checksum_tree (expr, &ctx, ht);
8219 md5_finish_ctx (&ctx, checksum_before);
8220 htab_empty (ht);
8221
8222 ret = fold_1 (expr);
8223
8224 md5_init_ctx (&ctx);
8225 fold_checksum_tree (expr, &ctx, ht);
8226 md5_finish_ctx (&ctx, checksum_after);
8227 htab_delete (ht);
8228
8229 if (memcmp (checksum_before, checksum_after, 16))
8230 fold_check_failed (expr, ret);
8231
8232 return ret;
8233 }
8234
8235 void
8236 print_fold_checksum (tree expr)
8237 {
8238 struct md5_ctx ctx;
8239 unsigned char checksum[16], cnt;
8240 htab_t ht;
8241
8242 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8243 md5_init_ctx (&ctx);
8244 fold_checksum_tree (expr, &ctx, ht);
8245 md5_finish_ctx (&ctx, checksum);
8246 htab_delete (ht);
8247 for (cnt = 0; cnt < 16; ++cnt)
8248 fprintf (stderr, "%02x", checksum[cnt]);
8249 putc ('\n', stderr);
8250 }
8251
8252 static void
8253 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
8254 {
8255 internal_error ("fold check: original tree changed by fold");
8256 }
8257
8258 static void
8259 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
8260 {
8261 void **slot;
8262 enum tree_code code;
8263 char buf[sizeof (struct tree_decl)];
8264 int i, len;
8265
8266 if (sizeof (struct tree_exp) + 5 * sizeof (tree)
8267 > sizeof (struct tree_decl)
8268 || sizeof (struct tree_type) > sizeof (struct tree_decl))
8269 abort ();
8270 if (expr == NULL)
8271 return;
8272 slot = htab_find_slot (ht, expr, INSERT);
8273 if (*slot != NULL)
8274 return;
8275 *slot = expr;
8276 code = TREE_CODE (expr);
8277 if (code == SAVE_EXPR && SAVE_EXPR_NOPLACEHOLDER (expr))
8278 {
8279 /* Allow SAVE_EXPR_NOPLACEHOLDER flag to be modified. */
8280 memcpy (buf, expr, tree_size (expr));
8281 expr = (tree) buf;
8282 SAVE_EXPR_NOPLACEHOLDER (expr) = 0;
8283 }
8284 else if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
8285 {
8286 /* Allow DECL_ASSEMBLER_NAME to be modified. */
8287 memcpy (buf, expr, tree_size (expr));
8288 expr = (tree) buf;
8289 SET_DECL_ASSEMBLER_NAME (expr, NULL);
8290 }
8291 else if (TREE_CODE_CLASS (code) == 't'
8292 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
8293 {
8294 /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified. */
8295 memcpy (buf, expr, tree_size (expr));
8296 expr = (tree) buf;
8297 TYPE_POINTER_TO (expr) = NULL;
8298 TYPE_REFERENCE_TO (expr) = NULL;
8299 }
8300 md5_process_bytes (expr, tree_size (expr), ctx);
8301 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
8302 if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
8303 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
8304 len = TREE_CODE_LENGTH (code);
8305 switch (TREE_CODE_CLASS (code))
8306 {
8307 case 'c':
8308 switch (code)
8309 {
8310 case STRING_CST:
8311 md5_process_bytes (TREE_STRING_POINTER (expr),
8312 TREE_STRING_LENGTH (expr), ctx);
8313 break;
8314 case COMPLEX_CST:
8315 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
8316 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
8317 break;
8318 case VECTOR_CST:
8319 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
8320 break;
8321 default:
8322 break;
8323 }
8324 break;
8325 case 'x':
8326 switch (code)
8327 {
8328 case TREE_LIST:
8329 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
8330 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
8331 break;
8332 case TREE_VEC:
8333 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
8334 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
8335 break;
8336 default:
8337 break;
8338 }
8339 break;
8340 case 'e':
8341 switch (code)
8342 {
8343 case SAVE_EXPR: len = 2; break;
8344 case GOTO_SUBROUTINE_EXPR: len = 0; break;
8345 case RTL_EXPR: len = 0; break;
8346 case WITH_CLEANUP_EXPR: len = 2; break;
8347 default: break;
8348 }
8349 /* FALLTHROUGH */
8350 case 'r':
8351 case '<':
8352 case '1':
8353 case '2':
8354 case 's':
8355 for (i = 0; i < len; ++i)
8356 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
8357 break;
8358 case 'd':
8359 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
8360 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
8361 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
8362 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
8363 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
8364 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
8365 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
8366 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
8367 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
8368 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
8369 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
8370 break;
8371 case 't':
8372 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
8373 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
8374 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
8375 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
8376 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
8377 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
8378 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
8379 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
8380 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
8381 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
8382 break;
8383 default:
8384 break;
8385 }
8386 }
8387
8388 #endif
8389
8390 /* Perform constant folding and related simplification of initializer
8391 expression EXPR. This behaves identically to "fold" but ignores
8392 potential run-time traps and exceptions that fold must preserve. */
8393
8394 tree
8395 fold_initializer (tree expr)
8396 {
8397 int saved_signaling_nans = flag_signaling_nans;
8398 int saved_trapping_math = flag_trapping_math;
8399 int saved_trapv = flag_trapv;
8400 tree result;
8401
8402 flag_signaling_nans = 0;
8403 flag_trapping_math = 0;
8404 flag_trapv = 0;
8405
8406 result = fold (expr);
8407
8408 flag_signaling_nans = saved_signaling_nans;
8409 flag_trapping_math = saved_trapping_math;
8410 flag_trapv = saved_trapv;
8411
8412 return result;
8413 }
8414
8415 /* Determine if first argument is a multiple of second argument. Return 0 if
8416 it is not, or we cannot easily determined it to be.
8417
8418 An example of the sort of thing we care about (at this point; this routine
8419 could surely be made more general, and expanded to do what the *_DIV_EXPR's
8420 fold cases do now) is discovering that
8421
8422 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8423
8424 is a multiple of
8425
8426 SAVE_EXPR (J * 8)
8427
8428 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
8429
8430 This code also handles discovering that
8431
8432 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8433
8434 is a multiple of 8 so we don't have to worry about dealing with a
8435 possible remainder.
8436
8437 Note that we *look* inside a SAVE_EXPR only to determine how it was
8438 calculated; it is not safe for fold to do much of anything else with the
8439 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
8440 at run time. For example, the latter example above *cannot* be implemented
8441 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
8442 evaluation time of the original SAVE_EXPR is not necessarily the same at
8443 the time the new expression is evaluated. The only optimization of this
8444 sort that would be valid is changing
8445
8446 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
8447
8448 divided by 8 to
8449
8450 SAVE_EXPR (I) * SAVE_EXPR (J)
8451
8452 (where the same SAVE_EXPR (J) is used in the original and the
8453 transformed version). */
8454
8455 static int
8456 multiple_of_p (tree type, tree top, tree bottom)
8457 {
8458 if (operand_equal_p (top, bottom, 0))
8459 return 1;
8460
8461 if (TREE_CODE (type) != INTEGER_TYPE)
8462 return 0;
8463
8464 switch (TREE_CODE (top))
8465 {
8466 case MULT_EXPR:
8467 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8468 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8469
8470 case PLUS_EXPR:
8471 case MINUS_EXPR:
8472 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8473 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8474
8475 case LSHIFT_EXPR:
8476 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
8477 {
8478 tree op1, t1;
8479
8480 op1 = TREE_OPERAND (top, 1);
8481 /* const_binop may not detect overflow correctly,
8482 so check for it explicitly here. */
8483 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
8484 > TREE_INT_CST_LOW (op1)
8485 && TREE_INT_CST_HIGH (op1) == 0
8486 && 0 != (t1 = convert (type,
8487 const_binop (LSHIFT_EXPR, size_one_node,
8488 op1, 0)))
8489 && ! TREE_OVERFLOW (t1))
8490 return multiple_of_p (type, t1, bottom);
8491 }
8492 return 0;
8493
8494 case NOP_EXPR:
8495 /* Can't handle conversions from non-integral or wider integral type. */
8496 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
8497 || (TYPE_PRECISION (type)
8498 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
8499 return 0;
8500
8501 /* .. fall through ... */
8502
8503 case SAVE_EXPR:
8504 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
8505
8506 case INTEGER_CST:
8507 if (TREE_CODE (bottom) != INTEGER_CST
8508 || (TREE_UNSIGNED (type)
8509 && (tree_int_cst_sgn (top) < 0
8510 || tree_int_cst_sgn (bottom) < 0)))
8511 return 0;
8512 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
8513 top, bottom, 0));
8514
8515 default:
8516 return 0;
8517 }
8518 }
8519
8520 /* Return true if `t' is known to be non-negative. */
8521
8522 int
8523 tree_expr_nonnegative_p (tree t)
8524 {
8525 switch (TREE_CODE (t))
8526 {
8527 case ABS_EXPR:
8528 return 1;
8529
8530 case INTEGER_CST:
8531 return tree_int_cst_sgn (t) >= 0;
8532
8533 case REAL_CST:
8534 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
8535
8536 case PLUS_EXPR:
8537 if (FLOAT_TYPE_P (TREE_TYPE (t)))
8538 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8539 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8540
8541 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
8542 both unsigned and at least 2 bits shorter than the result. */
8543 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8544 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8545 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8546 {
8547 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8548 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8549 if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
8550 && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
8551 {
8552 unsigned int prec = MAX (TYPE_PRECISION (inner1),
8553 TYPE_PRECISION (inner2)) + 1;
8554 return prec < TYPE_PRECISION (TREE_TYPE (t));
8555 }
8556 }
8557 break;
8558
8559 case MULT_EXPR:
8560 if (FLOAT_TYPE_P (TREE_TYPE (t)))
8561 {
8562 /* x * x for floating point x is always non-negative. */
8563 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
8564 return 1;
8565 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8566 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8567 }
8568
8569 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
8570 both unsigned and their total bits is shorter than the result. */
8571 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8572 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8573 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8574 {
8575 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8576 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8577 if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
8578 && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
8579 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
8580 < TYPE_PRECISION (TREE_TYPE (t));
8581 }
8582 return 0;
8583
8584 case TRUNC_DIV_EXPR:
8585 case CEIL_DIV_EXPR:
8586 case FLOOR_DIV_EXPR:
8587 case ROUND_DIV_EXPR:
8588 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8589 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8590
8591 case TRUNC_MOD_EXPR:
8592 case CEIL_MOD_EXPR:
8593 case FLOOR_MOD_EXPR:
8594 case ROUND_MOD_EXPR:
8595 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8596
8597 case RDIV_EXPR:
8598 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8599 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8600
8601 case NOP_EXPR:
8602 {
8603 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
8604 tree outer_type = TREE_TYPE (t);
8605
8606 if (TREE_CODE (outer_type) == REAL_TYPE)
8607 {
8608 if (TREE_CODE (inner_type) == REAL_TYPE)
8609 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8610 if (TREE_CODE (inner_type) == INTEGER_TYPE)
8611 {
8612 if (TREE_UNSIGNED (inner_type))
8613 return 1;
8614 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8615 }
8616 }
8617 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
8618 {
8619 if (TREE_CODE (inner_type) == REAL_TYPE)
8620 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
8621 if (TREE_CODE (inner_type) == INTEGER_TYPE)
8622 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
8623 && TREE_UNSIGNED (inner_type);
8624 }
8625 }
8626 break;
8627
8628 case COND_EXPR:
8629 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
8630 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
8631 case COMPOUND_EXPR:
8632 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8633 case MIN_EXPR:
8634 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8635 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8636 case MAX_EXPR:
8637 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8638 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8639 case MODIFY_EXPR:
8640 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8641 case BIND_EXPR:
8642 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8643 case SAVE_EXPR:
8644 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8645 case NON_LVALUE_EXPR:
8646 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8647 case FLOAT_EXPR:
8648 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8649 case RTL_EXPR:
8650 return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));
8651
8652 case CALL_EXPR:
8653 {
8654 tree fndecl = get_callee_fndecl (t);
8655 tree arglist = TREE_OPERAND (t, 1);
8656 if (fndecl
8657 && DECL_BUILT_IN (fndecl)
8658 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
8659 switch (DECL_FUNCTION_CODE (fndecl))
8660 {
8661 case BUILT_IN_CABS:
8662 case BUILT_IN_CABSL:
8663 case BUILT_IN_CABSF:
8664 case BUILT_IN_EXP:
8665 case BUILT_IN_EXPF:
8666 case BUILT_IN_EXPL:
8667 case BUILT_IN_EXP2:
8668 case BUILT_IN_EXP2F:
8669 case BUILT_IN_EXP2L:
8670 case BUILT_IN_EXP10:
8671 case BUILT_IN_EXP10F:
8672 case BUILT_IN_EXP10L:
8673 case BUILT_IN_FABS:
8674 case BUILT_IN_FABSF:
8675 case BUILT_IN_FABSL:
8676 case BUILT_IN_FFS:
8677 case BUILT_IN_FFSL:
8678 case BUILT_IN_FFSLL:
8679 case BUILT_IN_PARITY:
8680 case BUILT_IN_PARITYL:
8681 case BUILT_IN_PARITYLL:
8682 case BUILT_IN_POPCOUNT:
8683 case BUILT_IN_POPCOUNTL:
8684 case BUILT_IN_POPCOUNTLL:
8685 case BUILT_IN_POW10:
8686 case BUILT_IN_POW10F:
8687 case BUILT_IN_POW10L:
8688 case BUILT_IN_SQRT:
8689 case BUILT_IN_SQRTF:
8690 case BUILT_IN_SQRTL:
8691 return 1;
8692
8693 case BUILT_IN_ATAN:
8694 case BUILT_IN_ATANF:
8695 case BUILT_IN_ATANL:
8696 case BUILT_IN_CEIL:
8697 case BUILT_IN_CEILF:
8698 case BUILT_IN_CEILL:
8699 case BUILT_IN_FLOOR:
8700 case BUILT_IN_FLOORF:
8701 case BUILT_IN_FLOORL:
8702 case BUILT_IN_NEARBYINT:
8703 case BUILT_IN_NEARBYINTF:
8704 case BUILT_IN_NEARBYINTL:
8705 case BUILT_IN_ROUND:
8706 case BUILT_IN_ROUNDF:
8707 case BUILT_IN_ROUNDL:
8708 case BUILT_IN_TRUNC:
8709 case BUILT_IN_TRUNCF:
8710 case BUILT_IN_TRUNCL:
8711 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
8712
8713 case BUILT_IN_POW:
8714 case BUILT_IN_POWF:
8715 case BUILT_IN_POWL:
8716 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
8717
8718 default:
8719 break;
8720 }
8721 }
8722
8723 /* ... fall through ... */
8724
8725 default:
8726 if (truth_value_p (TREE_CODE (t)))
8727 /* Truth values evaluate to 0 or 1, which is nonnegative. */
8728 return 1;
8729 }
8730
8731 /* We don't know sign of `t', so be conservative and return false. */
8732 return 0;
8733 }
8734
8735 /* Return true if `r' is known to be non-negative.
8736 Only handles constants at the moment. */
8737
8738 int
8739 rtl_expr_nonnegative_p (rtx r)
8740 {
8741 switch (GET_CODE (r))
8742 {
8743 case CONST_INT:
8744 return INTVAL (r) >= 0;
8745
8746 case CONST_DOUBLE:
8747 if (GET_MODE (r) == VOIDmode)
8748 return CONST_DOUBLE_HIGH (r) >= 0;
8749 return 0;
8750
8751 case CONST_VECTOR:
8752 {
8753 int units, i;
8754 rtx elt;
8755
8756 units = CONST_VECTOR_NUNITS (r);
8757
8758 for (i = 0; i < units; ++i)
8759 {
8760 elt = CONST_VECTOR_ELT (r, i);
8761 if (!rtl_expr_nonnegative_p (elt))
8762 return 0;
8763 }
8764
8765 return 1;
8766 }
8767
8768 case SYMBOL_REF:
8769 case LABEL_REF:
8770 /* These are always nonnegative. */
8771 return 1;
8772
8773 default:
8774 return 0;
8775 }
8776 }
8777
8778 #include "gt-fold-const.h"