/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant and prior overflow indicator, and
   forces the value to fit the type.  It returns an overflow indicator.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"

static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree int_const_binop (enum tree_code, tree, tree, int);
static tree const_binop (enum tree_code, tree, tree, int);
static hashval_t size_htab_hash (const void *);
static int size_htab_eq (const void *, const void *);
static tree fold_convert (tree, tree);
static enum tree_code invert_tree_comparison (enum tree_code);
static enum tree_code swap_tree_comparison (enum tree_code);
static int comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (int);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static tree strip_compound_expr (tree, tree);
static int multiple_of_p (tree, tree, tree);
static tree constant_boolean_node (int, tree);
static int count_cond (tree, int);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
                                                 tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
#define COMPCODE_FALSE   0
#define COMPCODE_LT      1
#define COMPCODE_EQ      2
#define COMPCODE_LE      3
#define COMPCODE_GT      4
#define COMPCODE_NE      5
#define COMPCODE_GE      6
#define COMPCODE_TRUE    7

/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
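
/* Illustrative sketch (added commentary, not from the original source):
   with 8-bit values for brevity, 100 + 100 wraps to -56 in two's
   complement.  A and B are both positive (sign bits 0) while SUM's sign
   bit is 1, so ~(a ^ b) and (a ^ sum) both have the sign bit set, the
   AND is negative, and overflow is reported.  */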
\f
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)

/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
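
/* Illustrative sketch (hypothetical values, added commentary, assuming
   a 32-bit HOST_WIDE_INT so each of the 4 words holds 16 bits):

     HOST_WIDE_INT w[4];
     unsigned HOST_WIDE_INT lo;
     HOST_WIDE_INT hi;
     encode (w, 0x12345678, 0xabcd);
     => w[0] == 0x5678, w[1] == 0x1234, w[2] == 0xabcd, w[3] == 0
     decode (w, &lo, &hi);
     => lo == 0x12345678, hi == 0xabcd; decode inverts encode.  */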
\f
/* Make the integer constant T valid for its type by setting to 0 or 1 all
   the bits in the constant that don't belong in the type.

   Return 1 if a signed overflow occurs, 0 otherwise.  If OVERFLOW is
   nonzero, a signed overflow has already occurred in calculating T, so
   propagate it.  */

int
force_fit_type (tree t, int overflow)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;

  if (TREE_CODE (t) == REAL_CST)
    {
      /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
         Consider doing it via real_convert now.  */
      return overflow;
    }

  else if (TREE_CODE (t) != INTEGER_CST)
    return overflow;

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec == 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    TREE_INT_CST_HIGH (t)
      &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      TREE_INT_CST_HIGH (t) = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
    }

  /* Unsigned types do not suffer sign extension or overflow unless they
     are a sizetype.  */
  if (TREE_UNSIGNED (TREE_TYPE (t))
      && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
            && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
    return overflow;

  /* If the value's sign bit is set, extend the sign.  */
  if (prec != 2 * HOST_BITS_PER_WIDE_INT
      && (prec > HOST_BITS_PER_WIDE_INT
          ? 0 != (TREE_INT_CST_HIGH (t)
                  & ((HOST_WIDE_INT) 1
                     << (prec - HOST_BITS_PER_WIDE_INT - 1)))
          : 0 != (TREE_INT_CST_LOW (t)
                  & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
    {
      /* Value is negative:
         set to 1 all the bits that are outside this type's precision.  */
      if (prec > HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_HIGH (t)
          |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      else
        {
          TREE_INT_CST_HIGH (t) = -1;
          if (prec < HOST_BITS_PER_WIDE_INT)
            TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
        }
    }

  /* Return nonzero if signed overflow occurred.  */
  return
    ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
     != 0);
}
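
/* Illustrative sketch (hypothetical 8-bit type, added commentary, not
   from the original source): forcing 0x1ff into an unsigned 8-bit type
   truncates the stored low word to 0xff and merely propagates the
   incoming OVERFLOW flag; forcing 0xff into a signed 8-bit type
   sign-extends it to -1, and the return value is nonzero because the
   stored bits changed.  */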
\f
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
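
/* Illustrative sketch (added commentary): the carry out of the low word
   is detected by the unsigned comparison (l < l1).  For example, adding
   {l2 = 1, h2 = 0} to {l1 = ~(unsigned HOST_WIDE_INT) 0, h1 = 0} wraps
   l to 0, so l < l1 holds and h becomes 0 + 0 + 1 = 1, i.e. the
   doubleword value 2^HOST_BITS_PER_WIDE_INT.  */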

/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
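
/* Illustrative sketch (added commentary): -X is ~X + 1 in two's
   complement; when the low word is nonzero the + 1 is absorbed there
   (*lv = -l1, *hv = ~h1) and no overflow is possible, while the
   all-zero low word case can overflow exactly when negating the most
   negative doubleword value, detected by (*hv & h1) < 0.  */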
\f
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);        /* This ignores prod[4] through prod[4*2-1].  */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
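
/* Illustrative sketch (added commentary): this is schoolbook
   multiplication in base 2^(HOST_BITS_PER_WIDE_INT / 2).  In base 10
   the same loops computing 47 * 32 would accumulate the digit products
   7*2, 4*2, 7*3 and 4*3 into prod[] with carries, shifting each row
   left by one digit, exactly as done here in the larger base.  */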
\f
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
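
/* Note (added commentary, not from the original source): the two-step
   shift above, first by HOST_BITS_PER_WIDE_INT - count - 1 and then by
   1, avoids a single shift by a full word width when COUNT is zero,
   which C leaves undefined; rshift_double below uses the same trick in
   the other direction.  */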

/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT) prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
\f
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
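
/* Illustrative sketch (added commentary): a left rotate by COUNT is the
   OR of a left shift by COUNT and a logical right shift by
   PREC - COUNT.  For the 8-bit value 0xb1 rotated left by 4 with
   prec == 8: (0xb1 << 4) & 0xff == 0x10 and 0xb1 >> 4 == 0x0b, so the
   result is 0x1b.  */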

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
\f
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];     /* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {                           /* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {                           /* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);  /* to zero the extra scaling element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {                       /* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0)
                den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;        /* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num[num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:        /* round toward zero */
    case EXACT_DIV_EXPR:        /* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:        /* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:         /* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:        /* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      abort ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
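
/* Illustrative sketch (hypothetical operands, added commentary) of the
   rounding modes on 8 / -3, whose exact quotient is -2.67 and whose
   truncated quotient is -2 with remainder 2: TRUNC_DIV_EXPR returns -2;
   FLOOR_DIV_EXPR rounds toward negative infinity, giving -3 with
   remainder -1; CEIL_DIV_EXPR rounds toward positive infinity and keeps
   -2; ROUND_DIV_EXPR sees that 2 * |rem| exceeds |den| and adjusts to
   the nearer quotient -3.  */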
\f
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TREE_UNSIGNED (type))
        return false;

      /* Check that -CST will not overflow type.  */
      prec = TYPE_PRECISION (type);
      if (prec > HOST_BITS_PER_WIDE_INT)
        {
          if (TREE_INT_CST_LOW (t) != 0)
            return true;
          prec -= HOST_BITS_PER_WIDE_INT;
          val = TREE_INT_CST_HIGH (t);
        }
      else
        val = TREE_INT_CST_LOW (t);
      if (prec < HOST_BITS_PER_WIDE_INT)
        val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
      return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return ! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations;

    case MULT_EXPR:
      if (TREE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    default:
      break;
    }
  return false;
}
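
/* Illustrative sketch (added commentary): for a signed INTEGER_CST the
   only value whose negation overflows is the type's minimum, whose bit
   pattern within the precision is a lone sign bit; the code above
   reduces the constant to a single word and compares it against
   (unsigned HOST_WIDE_INT) 1 << (prec - 1).  In a 32-bit type,
   -2147483648 is rejected while -2147483647 is negatable.  */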

/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (! TREE_UNSIGNED (type)
          && 0 != (tem = fold (build1 (NEGATE_EXPR, type, t)))
          && ! TREE_OVERFLOW (tem))
        return tem;
      break;

    case REAL_CST:
      tem = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (t)));
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem))
        return convert (type, tem);
      break;

    case NEGATE_EXPR:
      return convert (type, TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        return convert (type,
                        fold (build (MINUS_EXPR, TREE_TYPE (t),
                                     TREE_OPERAND (t, 1),
                                     TREE_OPERAND (t, 0))));
      break;

    case MULT_EXPR:
      if (TREE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return convert (type,
                            fold (build (TREE_CODE (t), TREE_TYPE (t),
                                         TREE_OPERAND (t, 0),
                                         negate_expr (tem))));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return convert (type,
                            fold (build (TREE_CODE (t), TREE_TYPE (t),
                                         negate_expr (tem),
                                         TREE_OPERAND (t, 1))));
        }
      break;

    default:
      break;
    }

  return convert (type, fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t)));
}
\f
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
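
/* Illustrative sketch (hypothetical operands, added commentary):
   splitting IN = a - 5 with CODE == PLUS_EXPR recognizes the
   MINUS_EXPR, stores the literal 5 in *MINUS_LITP because it was
   subtracted, leaves *CONP null, and returns a as the variable part;
   associate_trees below can then rebuild an equivalent expression in
   any grouping.  */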

/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build (MINUS_EXPR, type, convert (type, t2),
                          convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build (MINUS_EXPR, type, convert (type, t1),
                          convert (type, TREE_OPERAND (t2, 0)));
        }
      return build (code, type, convert (type, t1), convert (type, t2));
    }

  return fold (build (code, type, convert (type, t1), convert (type, t2)));
}
\f
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TREE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;
  int no_overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case BIT_ANDTC_EXPR:
      low = int1l & ~int2l, hi = int1h & ~int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      abort ();
    }

  /* If this is for a sizetype, can be represented as one (signed)
     HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
     constants.  */
  if (is_sizetype
      && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
          || (hi == -1 && (HOST_WIDE_INT) low < 0))
      && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
    return size_int_type_wide (low, type);
  else
    {
      t = build_int_2 (low, hi);
      TREE_TYPE (t) = TREE_TYPE (arg1);
    }

  TREE_OVERFLOW (t)
    = ((notrunc
        ? (!uns || is_sizetype) && overflow
        : (force_fit_type (t, (!uns || is_sizetype) && overflow)
           && ! no_overflow))
       | TREE_OVERFLOW (arg1)
       | TREE_OVERFLOW (arg2));

  /* If we're doing a size calculation, unsigned arithmetic does overflow.
     So check if force_fit_type truncated the value.  */
  if (is_sizetype
      && ! TREE_OVERFLOW (t)
      && (TREE_INT_CST_HIGH (t) != hi
          || TREE_INT_CST_LOW (t) != low))
    TREE_OVERFLOW (t) = 1;

  TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
                                | TREE_CONSTANT_OVERFLOW (arg1)
                                | TREE_CONSTANT_OVERFLOW (arg2));
  return t;
}

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      REAL_ARITHMETIC (value, code, d1, d2);

      t = build_real (type, real_value_truncate (mode, value));

      TREE_OVERFLOW (t)
        = (force_fit_type (t, 0)
           | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t = build_complex (type,
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (PLUS_EXPR,
                                             const_binop (MULT_EXPR, r1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, i1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc),
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (MINUS_EXPR,
                                             const_binop (MULT_EXPR, i1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, r1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc));
          }
          break;

        default:
          abort ();
        }
      return t;
    }
  return 0;
}

/* These are the hash table functions for the hash table of INTEGER_CST
   nodes of a sizetype.  */

/* Return the hash code of X, an INTEGER_CST.  */

static hashval_t
size_htab_hash (const void *x)
{
  tree t = (tree) x;

  return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
          ^ htab_hash_pointer (TREE_TYPE (t))
          ^ (TREE_OVERFLOW (t) << 20));
}

/* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
   is the same as that given by *Y, which is also an INTEGER_CST tree node.  */

static int
size_htab_eq (const void *x, const void *y)
{
  tree xt = (tree) x;
  tree yt = (tree) y;

  return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
          && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
          && TREE_TYPE (xt) == TREE_TYPE (yt)
          && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
}
\f
/* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
   bits are given by NUMBER and of the sizetype represented by KIND.  */

tree
size_int_wide (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return size_int_type_wide (number, sizetype_tab[(int) kind]);
}

/* Likewise, but the desired type is specified explicitly.  */

static GTY (()) tree new_const;
static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
     htab_t size_htab;

tree
size_int_type_wide (HOST_WIDE_INT number, tree type)
{
  void **slot;

  if (size_htab == 0)
    {
      size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
      new_const = make_node (INTEGER_CST);
    }

  /* Adjust NEW_CONST to be the constant we want.  If it's already in the
     hash table, we return the value from the hash table.  Otherwise, we
     place that in the hash table and make a new node for the next time.  */
  TREE_INT_CST_LOW (new_const) = number;
  TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
  TREE_TYPE (new_const) = type;
  TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
    = force_fit_type (new_const, 0);

  slot = htab_find_slot (size_htab, new_const, INSERT);
  if (*slot == 0)
    {
      tree t = new_const;

      *slot = new_const;
      new_const = make_node (INTEGER_CST);
      return t;
    }
  else
    return (tree) *slot;
}
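
/* Illustrative note (added commentary): the cache above works by
   mutating the scratch node NEW_CONST and probing the table with it;
   only on a miss is NEW_CONST inserted and a fresh scratch node
   allocated, so repeated requests such as size_int_type_wide (4,
   sizetype) all return the same shared INTEGER_CST node.  */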

/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type and it must be a sizetype.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold (build (code, type, arg0, arg1));
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* If the type is already signed, just do the simple thing.  */
  if (! TREE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = (type == bitsizetype || type == ubitsizetype
           ? sbitsizetype : ssizetype);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, convert (ctype, arg0),
                       convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, convert (ctype, integer_zero_node),
                       convert (ctype, size_binop (MINUS_EXPR, arg1, arg0)));
}
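
/* Illustrative sketch (hypothetical operands, added commentary): for
   sizetype constants 2 and 5, the result is the negation of 5 - 2
   computed in the signed ssizetype, i.e. -3, rather than the huge
   wraparound value a plain unsigned MINUS_EXPR on sizetype would
   yield.  */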
\f

/* Given T, a tree representing type conversion of ARG1, a constant,
   return a constant tree representing the result of conversion.  */

static tree
fold_convert (tree t, tree arg1)
{
  tree type = TREE_TYPE (t);
  int overflow = 0;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        {
          /* If we would build a constant wider than GCC supports,
             leave the conversion unfolded.  */
          if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
            return t;

          /* If we are trying to make a sizetype for a small integer, use
             size_int to pick up cached types to reduce duplicate nodes.  */
          if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && !TREE_CONSTANT_OVERFLOW (arg1)
              && compare_tree_int (arg1, 10000) < 0)
            return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);

          /* Given an integer constant, make new constant with new type,
             appropriately sign-extended or truncated.  */
          t = build_int_2 (TREE_INT_CST_LOW (arg1),
                           TREE_INT_CST_HIGH (arg1));
          TREE_TYPE (t) = type;
          /* Indicate an overflow if (1) ARG1 already overflowed,
             or (2) force_fit_type indicates an overflow.
             Tell force_fit_type that an overflow has already occurred
             if ARG1 is a too-large unsigned value and T is signed.
             But don't indicate an overflow if converting a pointer.  */
          TREE_OVERFLOW (t)
            = ((force_fit_type (t,
                                (TREE_INT_CST_HIGH (arg1) < 0
                                 && (TREE_UNSIGNED (type)
                                     < TREE_UNSIGNED (TREE_TYPE (arg1)))))
                && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
               || TREE_OVERFLOW (arg1));
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
        }
      else if (TREE_CODE (arg1) == REAL_CST)
        {
          /* Don't initialize these, use assignments.
             Initialized local aggregates don't work on old compilers.  */
          REAL_VALUE_TYPE x;
          REAL_VALUE_TYPE l;
          REAL_VALUE_TYPE u;
          tree type1 = TREE_TYPE (arg1);
          int no_upper_bound;

          x = TREE_REAL_CST (arg1);
          l = real_value_from_int_cst (type1, TYPE_MIN_VALUE (type));

          no_upper_bound = (TYPE_MAX_VALUE (type) == NULL);
          if (!no_upper_bound)
            u = real_value_from_int_cst (type1, TYPE_MAX_VALUE (type));

          /* See if X will be in range after truncation towards 0.
             To compensate for truncation, move the bounds away from 0,
             but reject if X exactly equals the adjusted bounds.  */
          REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
          if (!no_upper_bound)
            REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
          /* If X is a NaN, use zero instead and show we have an overflow.
             Otherwise, range check.  */
          if (REAL_VALUE_ISNAN (x))
            overflow = 1, x = dconst0;
          else if (! (REAL_VALUES_LESS (l, x)
                      && !no_upper_bound
                      && REAL_VALUES_LESS (x, u)))
            overflow = 1;

          {
            HOST_WIDE_INT low, high;
            REAL_VALUE_TO_INT (&low, &high, x);
            t = build_int_2 (low, high);
          }
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t)
            = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
        }
      TREE_TYPE (t) = type;
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        {
          if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
            {
              /* We make a copy of ARG1 so that we don't modify an
                 existing constant tree.  */
              t = copy_node (arg1);
              TREE_TYPE (t) = type;
              return t;
            }

          t = build_real (type,
                          real_value_truncate (TYPE_MODE (type),
                                               TREE_REAL_CST (arg1)));

          TREE_OVERFLOW (t)
            = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
    }
  TREE_CONSTANT (t) = 1;
  return t;
}
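
/* Illustrative sketch (hypothetical 16-bit target type, added
   commentary): converting the REAL_CST 3.9 truncates toward zero to 3;
   the bounds are widened to the exclusive range (-32769, 32768), so any
   value truncating into [-32768, 32767] is accepted, while 32768.5 or a
   NaN sets the overflow flag.  */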
\f
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue (tree x)
{
  tree result;

  /* These things are certainly not lvalues.  */
  if (TREE_CODE (x) == NON_LVALUE_EXPR
      || TREE_CODE (x) == INTEGER_CST
      || TREE_CODE (x) == REAL_CST
      || TREE_CODE (x) == STRING_CST
      || TREE_CODE (x) == ADDR_EXPR)
    return x;

  result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
  TREE_CONSTANT (result) = TREE_CONSTANT (x);
  return result;
}

/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

tree
pedantic_non_lvalue (tree x)
{
  if (pedantic_lvalues)
    return non_lvalue (x);
  else
    return x;
}
\f
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR.  */

static enum tree_code
invert_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return LE_EXPR;
    case GE_EXPR:
      return LT_EXPR;
    case LT_EXPR:
      return GE_EXPR;
    case LE_EXPR:
      return GT_EXPR;
    default:
      abort ();
    }
}
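
/* Illustrative sketch (added commentary) of why inversion is unsafe for
   floating point: if y is a NaN, then x < y and x >= y are both false,
   so rewriting !(x < y) as x >= y would change the result; only EQ_EXPR
   and NE_EXPR invert safely in the presence of NaNs.  */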

/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

static enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    default:
      abort ();
    }
}


/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static int
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    default:
      abort ();
    }
}

/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (int code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    default:
      abort ();
    }
}

/* Return nonzero if CODE is a tree code that represents a truth value.  */

static int
truth_value_p (enum tree_code code)
{
  return (TREE_CODE_CLASS (code) == '<'
          || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
          || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
          || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
}
\f
/* Return nonzero if two operands are necessarily equal.
   If ONLY_CONST is nonzero, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.  */

int
operand_equal_p (tree arg0, tree arg1, int only_const)
{
  tree fndecl;

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  */
  if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
         Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! only_const
      && (TREE_CODE (arg0) == SAVE_EXPR
          || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
        return (! TREE_CONSTANT_OVERFLOW (arg0)
                && ! TREE_CONSTANT_OVERFLOW (arg1)
                && tree_int_cst_equal (arg0, arg1));

      case REAL_CST:
        return (! TREE_CONSTANT_OVERFLOW (arg0)
                && ! TREE_CONSTANT_OVERFLOW (arg1)
                && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
                                          TREE_REAL_CST (arg1)));

      case VECTOR_CST:
        {
          tree v1, v2;

          if (TREE_CONSTANT_OVERFLOW (arg0)
              || TREE_CONSTANT_OVERFLOW (arg1))
            return 0;

          v1 = TREE_VECTOR_CST_ELTS (arg0);
          v2 = TREE_VECTOR_CST_ELTS (arg1);
          while (v1 && v2)
            {
              if (!operand_equal_p (v1, v2, only_const))
                return 0;
              v1 = TREE_CHAIN (v1);
              v2 = TREE_CHAIN (v2);
            }

          return 1;
        }

      case COMPLEX_CST:
        return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
                                 only_const)
                && operand_equal_p (TREE_IMAGPART (arg0),
                                    TREE_IMAGPART (arg1), only_const));

      case STRING_CST:
        return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
                && ! memcmp (TREE_STRING_POINTER (arg0),
                             TREE_STRING_POINTER (arg1),
                             TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
        return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
                                0);
      default:
        break;
      }

  if (only_const)
    return 0;

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case '1':
      /* Two conversions are equal only if signedness and modes match.  */
      if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
          && (TREE_UNSIGNED (TREE_TYPE (arg0))
              != TREE_UNSIGNED (TREE_TYPE (arg1))))
        return 0;

      return operand_equal_p (TREE_OPERAND (arg0, 0),
                              TREE_OPERAND (arg1, 0), 0);

    case '<':
    case '2':
      if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
          && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
                              0))
        return 1;

      /* For commutative ops, allow the other order.  */
      return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
               || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
               || TREE_CODE (arg0) == BIT_IOR_EXPR
               || TREE_CODE (arg0) == BIT_XOR_EXPR
               || TREE_CODE (arg0) == BIT_AND_EXPR
               || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 1), 0)
              && operand_equal_p (TREE_OPERAND (arg0, 1),
                                  TREE_OPERAND (arg1, 0), 0));
1960
1961 case 'r':
1962 /* If either of the pointer (or reference) expressions we are
1963 dereferencing contain a side effect, these cannot be equal. */
1964 if (TREE_SIDE_EFFECTS (arg0)
1965 || TREE_SIDE_EFFECTS (arg1))
1966 return 0;
1967
1968 switch (TREE_CODE (arg0))
1969 {
1970 case INDIRECT_REF:
1971 return operand_equal_p (TREE_OPERAND (arg0, 0),
1972 TREE_OPERAND (arg1, 0), 0);
1973
1974 case COMPONENT_REF:
1975 case ARRAY_REF:
1976 case ARRAY_RANGE_REF:
1977 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1978 TREE_OPERAND (arg1, 0), 0)
1979 && operand_equal_p (TREE_OPERAND (arg0, 1),
1980 TREE_OPERAND (arg1, 1), 0));
1981
1982 case BIT_FIELD_REF:
1983 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1984 TREE_OPERAND (arg1, 0), 0)
1985 && operand_equal_p (TREE_OPERAND (arg0, 1),
1986 TREE_OPERAND (arg1, 1), 0)
1987 && operand_equal_p (TREE_OPERAND (arg0, 2),
1988 TREE_OPERAND (arg1, 2), 0));
1989 default:
1990 return 0;
1991 }
1992
1993 case 'e':
1994 switch (TREE_CODE (arg0))
1995 {
1996 case ADDR_EXPR:
1997 case TRUTH_NOT_EXPR:
1998 return operand_equal_p (TREE_OPERAND (arg0, 0),
1999 TREE_OPERAND (arg1, 0), 0);
2000
2001 case RTL_EXPR:
2002 return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
2003
2004 case CALL_EXPR:
2005 /* If the CALL_EXPRs call different functions, then they
2006 clearly cannot be equal. */
2007 if (! operand_equal_p (TREE_OPERAND (arg0, 0),
2008 TREE_OPERAND (arg1, 0), 0))
2009 return 0;
2010
2011 /* Only consider const functions equivalent. */
2012 fndecl = get_callee_fndecl (arg0);
2013 if (fndecl == NULL_TREE
2014 || ! (flags_from_decl_or_type (fndecl) & ECF_CONST))
2015 return 0;
2016
2017 /* Now see if all the arguments are the same. operand_equal_p
2018 does not handle TREE_LIST, so we walk the operands here
2019 feeding them to operand_equal_p. */
2020 arg0 = TREE_OPERAND (arg0, 1);
2021 arg1 = TREE_OPERAND (arg1, 1);
2022 while (arg0 && arg1)
2023 {
2024 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1), 0))
2025 return 0;
2026
2027 arg0 = TREE_CHAIN (arg0);
2028 arg1 = TREE_CHAIN (arg1);
2029 }
2030
2031 /* If we get here and both argument lists are exhausted
2032 then the CALL_EXPRs are equal. */
2033 return ! (arg0 || arg1);
2034
2035 default:
2036 return 0;
2037 }
2038
2039 case 'd':
2040 /* Consider __builtin_sqrt equal to sqrt. */
2041 return TREE_CODE (arg0) == FUNCTION_DECL
2042 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2043 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2044 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1);
2045
2046 default:
2047 return 0;
2048 }
2049 }
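/* Illustrative sketch, not part of this file, of the IEEE distinction
   documented above operand_equal_p: -0.0 and 0.0 are == yet
   distinguishable, and a NaN is indistinguishable from itself yet
   compares unequal.  Assumes IEEE arithmetic and C99 <math.h>: */
#include <assert.h>
#include <math.h>

static void
example_identity_vs_equality (void)
{
  double pz = 0.0, nz = -0.0;
  double n = nan ("");

  assert (pz == nz);                       /* equal under ==,       */
  assert (signbit (nz) && ! signbit (pz)); /* yet distinguishable.  */
  assert (n != n);                         /* a NaN is never == itself. */
}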
2050 \f
2051 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2052 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2053
2054 When in doubt, return 0. */
2055
2056 static int
2057 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2058 {
2059 int unsignedp1, unsignedpo;
2060 tree primarg0, primarg1, primother;
2061 unsigned int correct_width;
2062
2063 if (operand_equal_p (arg0, arg1, 0))
2064 return 1;
2065
2066 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2067 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2068 return 0;
2069
2070 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2071 and see if the inner values are the same. This removes any
2072 signedness comparison, which doesn't matter here. */
2073 primarg0 = arg0, primarg1 = arg1;
2074 STRIP_NOPS (primarg0);
2075 STRIP_NOPS (primarg1);
2076 if (operand_equal_p (primarg0, primarg1, 0))
2077 return 1;
2078
2079 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2080 actual comparison operand, ARG0.
2081
2082 First throw away any conversions to wider types
2083 already present in the operands. */
2084
2085 primarg1 = get_narrower (arg1, &unsignedp1);
2086 primother = get_narrower (other, &unsignedpo);
2087
2088 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2089 if (unsignedp1 == unsignedpo
2090 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2091 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2092 {
2093 tree type = TREE_TYPE (arg0);
2094
2095 /* Make sure shorter operand is extended the right way
2096 to match the longer operand. */
2097 primarg1 = convert ((*lang_hooks.types.signed_or_unsigned_type)
2098 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2099
2100 if (operand_equal_p (arg0, convert (type, primarg1), 0))
2101 return 1;
2102 }
2103
2104 return 0;
2105 }
2106 \f
2107 /* See if ARG is an expression that is either a comparison or is performing
2108 arithmetic on comparisons. The comparisons must only be comparing
2109 two different values, which will be stored in *CVAL1 and *CVAL2; if
2110 they are nonzero it means that some operands have already been found.
2111 No variables may be used anywhere else in the expression except in the
2112 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2113 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2114
2115 If this is true, return 1. Otherwise, return zero. */
2116
2117 static int
2118 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2119 {
2120 enum tree_code code = TREE_CODE (arg);
2121 char class = TREE_CODE_CLASS (code);
2122
2123 /* We can handle some of the 'e' cases here. */
2124 if (class == 'e' && code == TRUTH_NOT_EXPR)
2125 class = '1';
2126 else if (class == 'e'
2127 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2128 || code == COMPOUND_EXPR))
2129 class = '2';
2130
2131 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
2132 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2133 {
2134 /* If we've already found a CVAL1 or CVAL2, this expression is
2135 too complex to handle. */
2136 if (*cval1 || *cval2)
2137 return 0;
2138
2139 class = '1';
2140 *save_p = 1;
2141 }
2142
2143 switch (class)
2144 {
2145 case '1':
2146 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2147
2148 case '2':
2149 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2150 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2151 cval1, cval2, save_p));
2152
2153 case 'c':
2154 return 1;
2155
2156 case 'e':
2157 if (code == COND_EXPR)
2158 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2159 cval1, cval2, save_p)
2160 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2161 cval1, cval2, save_p)
2162 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2163 cval1, cval2, save_p));
2164 return 0;
2165
2166 case '<':
2167 /* First see if we can handle the first operand, then the second. For
2168 the second operand, we know *CVAL1 can't be zero. It must be that
2169 one side of the comparison is each of the values; test for the
2170 case where this isn't true by failing if the two operands
2171 are the same. */
2172
2173 if (operand_equal_p (TREE_OPERAND (arg, 0),
2174 TREE_OPERAND (arg, 1), 0))
2175 return 0;
2176
2177 if (*cval1 == 0)
2178 *cval1 = TREE_OPERAND (arg, 0);
2179 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2180 ;
2181 else if (*cval2 == 0)
2182 *cval2 = TREE_OPERAND (arg, 0);
2183 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2184 ;
2185 else
2186 return 0;
2187
2188 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2189 ;
2190 else if (*cval2 == 0)
2191 *cval2 = TREE_OPERAND (arg, 1);
2192 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2193 ;
2194 else
2195 return 0;
2196
2197 return 1;
2198
2199 default:
2200 return 0;
2201 }
2202 }
2203 \f
2204 /* ARG is a tree that is known to contain just arithmetic operations and
2205 comparisons. Evaluate the operations in the tree substituting NEW0 for
2206 any occurrence of OLD0 as an operand of a comparison and likewise for
2207 NEW1 and OLD1. */
2208
2209 static tree
2210 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2211 {
2212 tree type = TREE_TYPE (arg);
2213 enum tree_code code = TREE_CODE (arg);
2214 char class = TREE_CODE_CLASS (code);
2215
2216 /* We can handle some of the 'e' cases here. */
2217 if (class == 'e' && code == TRUTH_NOT_EXPR)
2218 class = '1';
2219 else if (class == 'e'
2220 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2221 class = '2';
2222
2223 switch (class)
2224 {
2225 case '1':
2226 return fold (build1 (code, type,
2227 eval_subst (TREE_OPERAND (arg, 0),
2228 old0, new0, old1, new1)));
2229
2230 case '2':
2231 return fold (build (code, type,
2232 eval_subst (TREE_OPERAND (arg, 0),
2233 old0, new0, old1, new1),
2234 eval_subst (TREE_OPERAND (arg, 1),
2235 old0, new0, old1, new1)));
2236
2237 case 'e':
2238 switch (code)
2239 {
2240 case SAVE_EXPR:
2241 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2242
2243 case COMPOUND_EXPR:
2244 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2245
2246 case COND_EXPR:
2247 return fold (build (code, type,
2248 eval_subst (TREE_OPERAND (arg, 0),
2249 old0, new0, old1, new1),
2250 eval_subst (TREE_OPERAND (arg, 1),
2251 old0, new0, old1, new1),
2252 eval_subst (TREE_OPERAND (arg, 2),
2253 old0, new0, old1, new1)));
2254 default:
2255 break;
2256 }
2257 /* Fall through - ??? */
2258
2259 case '<':
2260 {
2261 tree arg0 = TREE_OPERAND (arg, 0);
2262 tree arg1 = TREE_OPERAND (arg, 1);
2263
2264 /* We need to check both for exact equality and tree equality. The
2265 former will be true if the operand has a side-effect. In that
2266 case, we know the operand occurred exactly once. */
2267
2268 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2269 arg0 = new0;
2270 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2271 arg0 = new1;
2272
2273 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2274 arg1 = new0;
2275 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2276 arg1 = new1;
2277
2278 return fold (build (code, type, arg0, arg1));
2279 }
2280
2281 default:
2282 return arg;
2283 }
2284 }
2285 \f
2286 /* Return a tree for the case when the result of an expression is RESULT
2287 converted to TYPE and OMITTED was previously an operand of the expression
2288 but is now not needed (e.g., we folded OMITTED * 0).
2289
2290 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2291 the conversion of RESULT to TYPE. */
2292
2293 tree
2294 omit_one_operand (tree type, tree result, tree omitted)
2295 {
2296 tree t = convert (type, result);
2297
2298 if (TREE_SIDE_EFFECTS (omitted))
2299 return build (COMPOUND_EXPR, type, omitted, t);
2300
2301 return non_lvalue (t);
2302 }
2303
2304 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2305
2306 static tree
2307 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2308 {
2309 tree t = convert (type, result);
2310
2311 if (TREE_SIDE_EFFECTS (omitted))
2312 return build (COMPOUND_EXPR, type, omitted, t);
2313
2314 return pedantic_non_lvalue (t);
2315 }
2316 \f
2317 /* Return a simplified tree node for the truth-negation of ARG. This
2318 never alters ARG itself. We assume that ARG is an operation that
2319 returns a truth value (0 or 1). */
2320
2321 tree
2322 invert_truthvalue (tree arg)
2323 {
2324 tree type = TREE_TYPE (arg);
2325 enum tree_code code = TREE_CODE (arg);
2326
2327 if (code == ERROR_MARK)
2328 return arg;
2329
2330 /* If this is a comparison, we can simply invert it, except for
2331 floating-point non-equality comparisons, in which case we just
2332 enclose a TRUTH_NOT_EXPR around what we have. */
2333
2334 if (TREE_CODE_CLASS (code) == '<')
2335 {
2336 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2337 && !flag_unsafe_math_optimizations
2338 && code != NE_EXPR
2339 && code != EQ_EXPR)
2340 return build1 (TRUTH_NOT_EXPR, type, arg);
2341 else
2342 return build (invert_tree_comparison (code), type,
2343 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2344 }
2345
2346 switch (code)
2347 {
2348 case INTEGER_CST:
2349 return convert (type, build_int_2 (integer_zerop (arg), 0));
2350
2351 case TRUTH_AND_EXPR:
2352 return build (TRUTH_OR_EXPR, type,
2353 invert_truthvalue (TREE_OPERAND (arg, 0)),
2354 invert_truthvalue (TREE_OPERAND (arg, 1)));
2355
2356 case TRUTH_OR_EXPR:
2357 return build (TRUTH_AND_EXPR, type,
2358 invert_truthvalue (TREE_OPERAND (arg, 0)),
2359 invert_truthvalue (TREE_OPERAND (arg, 1)));
2360
2361 case TRUTH_XOR_EXPR:
2362 /* Here we can invert either operand. We invert the first operand
2363 unless the second operand is a TRUTH_NOT_EXPR, in which case our
2364 result is the XOR of the first operand with the inside of the
2365 negation of the second operand. */
2366
2367 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2368 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2369 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2370 else
2371 return build (TRUTH_XOR_EXPR, type,
2372 invert_truthvalue (TREE_OPERAND (arg, 0)),
2373 TREE_OPERAND (arg, 1));
2374
2375 case TRUTH_ANDIF_EXPR:
2376 return build (TRUTH_ORIF_EXPR, type,
2377 invert_truthvalue (TREE_OPERAND (arg, 0)),
2378 invert_truthvalue (TREE_OPERAND (arg, 1)));
2379
2380 case TRUTH_ORIF_EXPR:
2381 return build (TRUTH_ANDIF_EXPR, type,
2382 invert_truthvalue (TREE_OPERAND (arg, 0)),
2383 invert_truthvalue (TREE_OPERAND (arg, 1)));
2384
2385 case TRUTH_NOT_EXPR:
2386 return TREE_OPERAND (arg, 0);
2387
2388 case COND_EXPR:
2389 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2390 invert_truthvalue (TREE_OPERAND (arg, 1)),
2391 invert_truthvalue (TREE_OPERAND (arg, 2)));
2392
2393 case COMPOUND_EXPR:
2394 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2395 invert_truthvalue (TREE_OPERAND (arg, 1)));
2396
2397 case WITH_RECORD_EXPR:
2398 return build (WITH_RECORD_EXPR, type,
2399 invert_truthvalue (TREE_OPERAND (arg, 0)),
2400 TREE_OPERAND (arg, 1));
2401
2402 case NON_LVALUE_EXPR:
2403 return invert_truthvalue (TREE_OPERAND (arg, 0));
2404
2405 case NOP_EXPR:
2406 case CONVERT_EXPR:
2407 case FLOAT_EXPR:
2408 return build1 (TREE_CODE (arg), type,
2409 invert_truthvalue (TREE_OPERAND (arg, 0)));
2410
2411 case BIT_AND_EXPR:
2412 if (!integer_onep (TREE_OPERAND (arg, 1)))
2413 break;
2414 return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));
2415
2416 case SAVE_EXPR:
2417 return build1 (TRUTH_NOT_EXPR, type, arg);
2418
2419 case CLEANUP_POINT_EXPR:
2420 return build1 (CLEANUP_POINT_EXPR, type,
2421 invert_truthvalue (TREE_OPERAND (arg, 0)));
2422
2423 default:
2424 break;
2425 }
2426 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2427 abort ();
2428 return build1 (TRUTH_NOT_EXPR, type, arg);
2429 }
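/* Illustrative sketch, not part of this file, checking the main
   rewrites performed above on plain ints: the negation is pushed
   inward by De Morgan's laws, and an XOR of truth values is negated
   by negating just one operand: */
#include <assert.h>

static void
example_truth_inversion (int a, int b)
{
  int A = !!a, B = !!b;   /* normalized truth values */

  assert (!(A && B) == (!A || !B));  /* TRUTH_AND_EXPR -> TRUTH_OR_EXPR */
  assert (!(A || B) == (!A && !B));  /* TRUTH_OR_EXPR -> TRUTH_AND_EXPR */
  assert (!(A ^ B) == ((!A) ^ B));   /* TRUTH_XOR_EXPR: flip one side   */
}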
2430
2431 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2432 operands are another bit-wise operation with a common input. If so,
2433 distribute the bit operations to save an operation and possibly two if
2434 constants are involved. For example, convert
2435 (A | B) & (A | C) into A | (B & C)
2436 Further simplification will occur if B and C are constants.
2437
2438 If this optimization cannot be done, 0 will be returned. */
2439
2440 static tree
2441 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
2442 {
2443 tree common;
2444 tree left, right;
2445
2446 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2447 || TREE_CODE (arg0) == code
2448 || (TREE_CODE (arg0) != BIT_AND_EXPR
2449 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2450 return 0;
2451
2452 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2453 {
2454 common = TREE_OPERAND (arg0, 0);
2455 left = TREE_OPERAND (arg0, 1);
2456 right = TREE_OPERAND (arg1, 1);
2457 }
2458 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2459 {
2460 common = TREE_OPERAND (arg0, 0);
2461 left = TREE_OPERAND (arg0, 1);
2462 right = TREE_OPERAND (arg1, 0);
2463 }
2464 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2465 {
2466 common = TREE_OPERAND (arg0, 1);
2467 left = TREE_OPERAND (arg0, 0);
2468 right = TREE_OPERAND (arg1, 1);
2469 }
2470 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2471 {
2472 common = TREE_OPERAND (arg0, 1);
2473 left = TREE_OPERAND (arg0, 0);
2474 right = TREE_OPERAND (arg1, 0);
2475 }
2476 else
2477 return 0;
2478
2479 return fold (build (TREE_CODE (arg0), type, common,
2480 fold (build (code, type, left, right))));
2481 }
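/* Illustrative sketch, not part of this file: the distribution
   identities applied above hold bitwise for any values, e.g.
   (A | B) & (A | C) == A | (B & C) and its dual: */
#include <assert.h>

static void
example_bit_distribution (unsigned a, unsigned b, unsigned c)
{
  assert (((a | b) & (a | c)) == (a | (b & c)));
  assert (((a & b) | (a & c)) == (a & (b | c)));
}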
2482 \f
2483 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2484 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
2485
2486 static tree
2487 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
2488 int unsignedp)
2489 {
2490 tree result = build (BIT_FIELD_REF, type, inner,
2491 size_int (bitsize), bitsize_int (bitpos));
2492
2493 TREE_UNSIGNED (result) = unsignedp;
2494
2495 return result;
2496 }
2497
2498 /* Optimize a bit-field compare.
2499
2500 There are two cases: First is a compare against a constant and the
2501 second is a comparison of two items where the fields are at the same
2502 bit position relative to the start of a chunk (byte, halfword, word)
2503 large enough to contain it. In these cases we can avoid the shift
2504 implicit in bitfield extractions.
2505
2506 For constants, we emit a compare of the shifted constant with the
2507 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2508 compared. For two fields at the same position, we do the ANDs with the
2509 similar mask and compare the result of the ANDs.
2510
2511 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2512 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2513 are the left and right operands of the comparison, respectively.
2514
2515 If the optimization described above can be done, we return the resulting
2516 tree. Otherwise we return zero. */
2517
2518 static tree
2519 optimize_bit_field_compare (enum tree_code code, tree compare_type,
2520 tree lhs, tree rhs)
2521 {
2522 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
2523 tree type = TREE_TYPE (lhs);
2524 tree signed_type, unsigned_type;
2525 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2526 enum machine_mode lmode, rmode, nmode;
2527 int lunsignedp, runsignedp;
2528 int lvolatilep = 0, rvolatilep = 0;
2529 tree linner, rinner = NULL_TREE;
2530 tree mask;
2531 tree offset;
2532
2533 /* Get all the information about the extractions being done. If the bit size
2534 is the same as the size of the underlying object, we aren't doing an
2535 extraction at all and so can do nothing. We also don't want to
2536 do anything if the inner expression is a PLACEHOLDER_EXPR since we
2537 then will no longer be able to replace it. */
2538 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2539 &lunsignedp, &lvolatilep);
2540 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2541 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
2542 return 0;
2543
2544 if (!const_p)
2545 {
2546 /* If this is not a constant, we can only do something if bit positions,
2547 sizes, and signedness are the same. */
2548 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
2549 &runsignedp, &rvolatilep);
2550
2551 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2552 || lunsignedp != runsignedp || offset != 0
2553 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
2554 return 0;
2555 }
2556
2557 /* See if we can find a mode to refer to this field. We should be able to,
2558 but fail if we can't. */
2559 nmode = get_best_mode (lbitsize, lbitpos,
2560 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
2561 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
2562 TYPE_ALIGN (TREE_TYPE (rinner))),
2563 word_mode, lvolatilep || rvolatilep);
2564 if (nmode == VOIDmode)
2565 return 0;
2566
2567 /* Set signed and unsigned types of the precision of this mode for the
2568 shifts below. */
2569 signed_type = (*lang_hooks.types.type_for_mode) (nmode, 0);
2570 unsigned_type = (*lang_hooks.types.type_for_mode) (nmode, 1);
2571
2572 /* Compute the bit position and size for the new reference and our offset
2573 within it. If the new reference is the same size as the original, we
2574 won't optimize anything, so return zero. */
2575 nbitsize = GET_MODE_BITSIZE (nmode);
2576 nbitpos = lbitpos & ~ (nbitsize - 1);
2577 lbitpos -= nbitpos;
2578 if (nbitsize == lbitsize)
2579 return 0;
2580
2581 if (BYTES_BIG_ENDIAN)
2582 lbitpos = nbitsize - lbitsize - lbitpos;
2583
2584 /* Make the mask to be used against the extracted field. */
2585 mask = build_int_2 (~0, ~0);
2586 TREE_TYPE (mask) = unsigned_type;
2587 force_fit_type (mask, 0);
2588 mask = convert (unsigned_type, mask);
2589 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
2590 mask = const_binop (RSHIFT_EXPR, mask,
2591 size_int (nbitsize - lbitsize - lbitpos), 0);
2592
2593 if (! const_p)
2594 /* If not comparing with a constant, just rework the comparison
2595 and return. */
2596 return build (code, compare_type,
2597 build (BIT_AND_EXPR, unsigned_type,
2598 make_bit_field_ref (linner, unsigned_type,
2599 nbitsize, nbitpos, 1),
2600 mask),
2601 build (BIT_AND_EXPR, unsigned_type,
2602 make_bit_field_ref (rinner, unsigned_type,
2603 nbitsize, nbitpos, 1),
2604 mask));
2605
2606 /* Otherwise, we are handling the constant case. See if the constant is too
2607 big for the field. Warn and return a tree for 0 (false) if so. We do
2608 this not only for its own sake, but to avoid having to test for this
2609 error case below. If we didn't, we might generate wrong code.
2610
2611 For unsigned fields, the constant shifted right by the field length should
2612 be all zero. For signed fields, the high-order bits should agree with
2613 the sign bit. */
2614
2615 if (lunsignedp)
2616 {
2617 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2618 convert (unsigned_type, rhs),
2619 size_int (lbitsize), 0)))
2620 {
2621 warning ("comparison is always %d due to width of bit-field",
2622 code == NE_EXPR);
2623 return convert (compare_type,
2624 (code == NE_EXPR
2625 ? integer_one_node : integer_zero_node));
2626 }
2627 }
2628 else
2629 {
2630 tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
2631 size_int (lbitsize - 1), 0);
2632 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2633 {
2634 warning ("comparison is always %d due to width of bit-field",
2635 code == NE_EXPR);
2636 return convert (compare_type,
2637 (code == NE_EXPR
2638 ? integer_one_node : integer_zero_node));
2639 }
2640 }
2641
2642 /* Single-bit compares should always be against zero. */
2643 if (lbitsize == 1 && ! integer_zerop (rhs))
2644 {
2645 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2646 rhs = convert (type, integer_zero_node);
2647 }
2648
2649 /* Make a new bitfield reference, shift the constant over the
2650 appropriate number of bits and mask it with the computed mask
2651 (in case this was a signed field). If we changed it, make a new one. */
2652 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
2653 if (lvolatilep)
2654 {
2655 TREE_SIDE_EFFECTS (lhs) = 1;
2656 TREE_THIS_VOLATILE (lhs) = 1;
2657 }
2658
2659 rhs = fold (const_binop (BIT_AND_EXPR,
2660 const_binop (LSHIFT_EXPR,
2661 convert (unsigned_type, rhs),
2662 size_int (lbitpos), 0),
2663 mask, 0));
2664
2665 return build (code, compare_type,
2666 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
2667 rhs);
2668 }
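/* Illustrative sketch, not part of this file, of the shape of code the
   constant case above produces, written out by hand: instead of
   extracting the bit-field (shift, mask, shift again), AND the
   containing word with a mask and compare against the constant shifted
   into the field's position.  The field position and width here are
   hypothetical: */
static int
example_bitfield_compare (unsigned word)
{
  const unsigned bitpos = 3, bitsize = 4;  /* hypothetical field  */
  const unsigned mask = ((1u << bitsize) - 1) << bitpos;
  const unsigned cst = 5;                  /* is the field == 5?  */

  /* One AND and one compare; no extraction shifts.  */
  return (word & mask) == (cst << bitpos);
}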
2669 \f
2670 /* Subroutine for fold_truthop: decode a field reference.
2671
2672 If EXP is a comparison reference, we return the innermost reference.
2673
2674 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2675 set to the starting bit number.
2676
2677 If the innermost field can be completely contained in a mode-sized
2678 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
2679
2680 *PVOLATILEP is set to 1 if any expression encountered is volatile;
2681 otherwise it is not changed.
2682
2683 *PUNSIGNEDP is set to the signedness of the field.
2684
2685 *PMASK is set to the mask used. This is either contained in a
2686 BIT_AND_EXPR or derived from the width of the field.
2687
2688 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
2689
2690 Return 0 if this is not a component reference or is one that we can't
2691 do anything with. */
2692
2693 static tree
2694 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
2695 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
2696 int *punsignedp, int *pvolatilep,
2697 tree *pmask, tree *pand_mask)
2698 {
2699 tree outer_type = 0;
2700 tree and_mask = 0;
2701 tree mask, inner, offset;
2702 tree unsigned_type;
2703 unsigned int precision;
2704
2705 /* All the optimizations using this function assume integer fields.
2706 There are problems with FP fields since the type_for_size call
2707 below can fail for, e.g., XFmode. */
2708 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
2709 return 0;
2710
2711 /* We are interested in the bare arrangement of bits, so strip everything
2712 that doesn't affect the machine mode. However, record the type of the
2713 outermost expression if it may matter below. */
2714 if (TREE_CODE (exp) == NOP_EXPR
2715 || TREE_CODE (exp) == CONVERT_EXPR
2716 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2717 outer_type = TREE_TYPE (exp);
2718 STRIP_NOPS (exp);
2719
2720 if (TREE_CODE (exp) == BIT_AND_EXPR)
2721 {
2722 and_mask = TREE_OPERAND (exp, 1);
2723 exp = TREE_OPERAND (exp, 0);
2724 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
2725 if (TREE_CODE (and_mask) != INTEGER_CST)
2726 return 0;
2727 }
2728
2729 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
2730 punsignedp, pvolatilep);
2731 if ((inner == exp && and_mask == 0)
2732 || *pbitsize < 0 || offset != 0
2733 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
2734 return 0;
2735
2736 /* If the number of bits in the reference is the same as the bitsize of
2737 the outer type, then the outer type gives the signedness. Otherwise
2738 (in case of a small bitfield) the signedness is unchanged. */
2739 if (outer_type && *pbitsize == tree_low_cst (TYPE_SIZE (outer_type), 1))
2740 *punsignedp = TREE_UNSIGNED (outer_type);
2741
2742 /* Compute the mask to access the bitfield. */
2743 unsigned_type = (*lang_hooks.types.type_for_size) (*pbitsize, 1);
2744 precision = TYPE_PRECISION (unsigned_type);
2745
2746 mask = build_int_2 (~0, ~0);
2747 TREE_TYPE (mask) = unsigned_type;
2748 force_fit_type (mask, 0);
2749 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2750 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2751
2752 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
2753 if (and_mask != 0)
2754 mask = fold (build (BIT_AND_EXPR, unsigned_type,
2755 convert (unsigned_type, and_mask), mask));
2756
2757 *pmask = mask;
2758 *pand_mask = and_mask;
2759 return inner;
2760 }
2761
2762 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
2763 bit positions. */
2764
2765 static int
2766 all_ones_mask_p (tree mask, int size)
2767 {
2768 tree type = TREE_TYPE (mask);
2769 unsigned int precision = TYPE_PRECISION (type);
2770 tree tmask;
2771
2772 tmask = build_int_2 (~0, ~0);
2773 TREE_TYPE (tmask) = (*lang_hooks.types.signed_type) (type);
2774 force_fit_type (tmask, 0);
2775 return
2776 tree_int_cst_equal (mask,
2777 const_binop (RSHIFT_EXPR,
2778 const_binop (LSHIFT_EXPR, tmask,
2779 size_int (precision - size),
2780 0),
2781 size_int (precision - size), 0));
2782 }
2783
2784 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
2785 represents the sign bit of EXP's type. If EXP represents a sign
2786 or zero extension, also test VAL against the unextended type.
2787 The return value is the (sub)expression whose sign bit is VAL,
2788 or NULL_TREE otherwise. */
2789
2790 static tree
2791 sign_bit_p (tree exp, tree val)
2792 {
2793 unsigned HOST_WIDE_INT mask_lo, lo;
2794 HOST_WIDE_INT mask_hi, hi;
2795 int width;
2796 tree t;
2797
2798 /* Tree EXP must have an integral type. */
2799 t = TREE_TYPE (exp);
2800 if (! INTEGRAL_TYPE_P (t))
2801 return NULL_TREE;
2802
2803 /* Tree VAL must be an integer constant. */
2804 if (TREE_CODE (val) != INTEGER_CST
2805 || TREE_CONSTANT_OVERFLOW (val))
2806 return NULL_TREE;
2807
2808 width = TYPE_PRECISION (t);
2809 if (width > HOST_BITS_PER_WIDE_INT)
2810 {
2811 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
2812 lo = 0;
2813
2814 mask_hi = ((unsigned HOST_WIDE_INT) -1
2815 >> (2 * HOST_BITS_PER_WIDE_INT - width));
2816 mask_lo = -1;
2817 }
2818 else
2819 {
2820 hi = 0;
2821 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
2822
2823 mask_hi = 0;
2824 mask_lo = ((unsigned HOST_WIDE_INT) -1
2825 >> (HOST_BITS_PER_WIDE_INT - width));
2826 }
2827
2828 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
2829 treat VAL as if it were unsigned. */
2830 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
2831 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
2832 return exp;
2833
2834 /* Handle extension from a narrower type. */
2835 if (TREE_CODE (exp) == NOP_EXPR
2836 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
2837 return sign_bit_p (TREE_OPERAND (exp, 0), val);
2838
2839 return NULL_TREE;
2840 }
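/* Illustrative sketch, not part of this file: the constant recognized
   above is the single bit 1 << (width - 1), and on the usual
   two's-complement targets (no padding bits assumed) testing it is the
   same as testing x < 0, which is what callers of sign_bit_p exploit: */
#include <assert.h>
#include <limits.h>

static void
example_sign_bit (int x)
{
  unsigned sign = 1u << (sizeof (int) * CHAR_BIT - 1);

  assert ((((unsigned) x & sign) != 0) == (x < 0));
}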
2841
2842 /* Subroutine for fold_truthop: determine if an operand is simple enough
2843 to be evaluated unconditionally. */
2844
2845 static int
2846 simple_operand_p (tree exp)
2847 {
2848 /* Strip any conversions that don't change the machine mode. */
2849 while ((TREE_CODE (exp) == NOP_EXPR
2850 || TREE_CODE (exp) == CONVERT_EXPR)
2851 && (TYPE_MODE (TREE_TYPE (exp))
2852 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
2853 exp = TREE_OPERAND (exp, 0);
2854
2855 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
2856 || (DECL_P (exp)
2857 && ! TREE_ADDRESSABLE (exp)
2858 && ! TREE_THIS_VOLATILE (exp)
2859 && ! DECL_NONLOCAL (exp)
2860 /* Don't regard global variables as simple. They may be
2861 allocated in ways unknown to the compiler (shared memory,
2862 #pragma weak, etc.). */
2863 && ! TREE_PUBLIC (exp)
2864 && ! DECL_EXTERNAL (exp)
2865 /* Loading a static variable is unduly expensive, but global
2866 registers aren't expensive. */
2867 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
2868 }
2869 \f
2870 /* The following functions are subroutines to fold_range_test and allow it to
2871 try to change a logical combination of comparisons into a range test.
2872
2873 For example, both
2874 X == 2 || X == 3 || X == 4 || X == 5
2875 and
2876 X >= 2 && X <= 5
2877 are converted to
2878 (unsigned) (X - 2) <= 3
2879
2880 We describe each set of comparisons as being either inside or outside
2881 a range, using a variable named like IN_P, and then describe the
2882 range with a lower and upper bound. If one of the bounds is omitted,
2883 it represents either the highest or lowest value of the type.
2884
2885 In the comments below, we represent a range by two numbers in brackets
2886 preceded by a "+" to designate being inside that range, or a "-" to
2887 designate being outside that range, so the condition can be inverted by
2888 flipping the prefix. An omitted bound is represented by a "-". For
2889 example, "- [-, 10]" means being outside the range starting at the lowest
2890 possible value and ending at 10, in other words, being greater than 10.
2891 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
2892 always false.
2893
2894 We set up things so that the missing bounds are handled in a consistent
2895 manner so neither a missing bound nor "true" and "false" need to be
2896 handled using a special case. */
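/* Illustrative sketch, not part of this file, verifying the
   transformation quoted above for any int: the chained equalities, the
   bounded test, and the single unsigned comparison all agree (the
   subtraction is done unsigned, so it is well defined even near
   INT_MIN): */
#include <assert.h>

static void
example_range_test (int x)
{
  int chained = (x == 2 || x == 3 || x == 4 || x == 5);
  int ranged = (x >= 2 && x <= 5);
  int folded = ((unsigned) x - 2u <= 3u);

  assert (chained == ranged && ranged == folded);
}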
2897
2898 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
2899 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
2900 and UPPER1_P are nonzero if the respective argument is an upper bound
2901 and zero for a lower. TYPE, if nonzero, is the type of the result; it
2902 must be specified for a comparison. ARG1 will be converted to ARG0's
2903 type if both are specified. */
2904
2905 static tree
2906 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
2907 tree arg1, int upper1_p)
2908 {
2909 tree tem;
2910 int result;
2911 int sgn0, sgn1;
2912
2913 /* If neither arg represents infinity, do the normal operation.
2914 Else, if not a comparison, return infinity. Else handle the special
2915 comparison rules. Note that most of the cases below won't occur, but
2916 are handled for consistency. */
2917
2918 if (arg0 != 0 && arg1 != 0)
2919 {
2920 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
2921 arg0, convert (TREE_TYPE (arg0), arg1)));
2922 STRIP_NOPS (tem);
2923 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
2924 }
2925
2926 if (TREE_CODE_CLASS (code) != '<')
2927 return 0;
2928
2929 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
2930 for neither. In real maths, we cannot assume open ended ranges are
2931 the same. But, this is computer arithmetic, where numbers are finite.
2932 We can therefore model any omitted bound as a value Z, Z being
2933 greater than any representable number. This permits
2934 us to treat unbounded ranges as equal.
2935 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
2936 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
2937 switch (code)
2938 {
2939 case EQ_EXPR:
2940 result = sgn0 == sgn1;
2941 break;
2942 case NE_EXPR:
2943 result = sgn0 != sgn1;
2944 break;
2945 case LT_EXPR:
2946 result = sgn0 < sgn1;
2947 break;
2948 case LE_EXPR:
2949 result = sgn0 <= sgn1;
2950 break;
2951 case GT_EXPR:
2952 result = sgn0 > sgn1;
2953 break;
2954 case GE_EXPR:
2955 result = sgn0 >= sgn1;
2956 break;
2957 default:
2958 abort ();
2959 }
2960
2961 return convert (type, result ? integer_one_node : integer_zero_node);
2962 }
2963 \f
2964 /* Given EXP, a logical expression, set the range it is testing into
2965 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
2966 actually being tested. *PLOW and *PHIGH will be made of the same type
2967 as the returned expression. If EXP is not a comparison, we will most
2968 likely not be returning a useful value and range. */
2969
2970 static tree
2971 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
2972 {
2973 enum tree_code code;
2974 tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
2975 tree orig_type = NULL_TREE;
2976 int in_p, n_in_p;
2977 tree low, high, n_low, n_high;
2978
2979 /* Start with simply saying "EXP != 0" and then look at the code of EXP
2980 and see if we can refine the range. Some of the cases below may not
2981 happen, but it doesn't seem worth worrying about this. We "continue"
2982 the outer loop when we've changed something; otherwise we "break"
2983 the switch, which will "break" the while. */
2984
2985 in_p = 0, low = high = convert (TREE_TYPE (exp), integer_zero_node);
2986
2987 while (1)
2988 {
2989 code = TREE_CODE (exp);
2990
2991 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
2992 {
2993 if (first_rtl_op (code) > 0)
2994 arg0 = TREE_OPERAND (exp, 0);
2995 if (TREE_CODE_CLASS (code) == '<'
2996 || TREE_CODE_CLASS (code) == '1'
2997 || TREE_CODE_CLASS (code) == '2')
2998 type = TREE_TYPE (arg0);
2999 if (TREE_CODE_CLASS (code) == '2'
3000 || TREE_CODE_CLASS (code) == '<'
3001 || (TREE_CODE_CLASS (code) == 'e'
3002 && TREE_CODE_LENGTH (code) > 1))
3003 arg1 = TREE_OPERAND (exp, 1);
3004 }
3005
3006 /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
3007 lose a cast by accident. */
3008 if (type != NULL_TREE && orig_type == NULL_TREE)
3009 orig_type = type;
3010
3011 switch (code)
3012 {
3013 case TRUTH_NOT_EXPR:
3014 in_p = ! in_p, exp = arg0;
3015 continue;
3016
3017 case EQ_EXPR: case NE_EXPR:
3018 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3019 /* We can only do something if the range is testing for zero
3020 and if the second operand is an integer constant. Note that
3021 saying something is "in" the range we make is done by
3022 complementing IN_P since it will set in the initial case of
3023 being not equal to zero; "out" is leaving it alone. */
3024 if (low == 0 || high == 0
3025 || ! integer_zerop (low) || ! integer_zerop (high)
3026 || TREE_CODE (arg1) != INTEGER_CST)
3027 break;
3028
3029 switch (code)
3030 {
3031 case NE_EXPR: /* - [c, c] */
3032 low = high = arg1;
3033 break;
3034 case EQ_EXPR: /* + [c, c] */
3035 in_p = ! in_p, low = high = arg1;
3036 break;
3037 case GT_EXPR: /* - [-, c] */
3038 low = 0, high = arg1;
3039 break;
3040 case GE_EXPR: /* + [c, -] */
3041 in_p = ! in_p, low = arg1, high = 0;
3042 break;
3043 case LT_EXPR: /* - [c, -] */
3044 low = arg1, high = 0;
3045 break;
3046 case LE_EXPR: /* + [-, c] */
3047 in_p = ! in_p, low = 0, high = arg1;
3048 break;
3049 default:
3050 abort ();
3051 }
3052
3053 exp = arg0;
3054
3055 /* If this is an unsigned comparison, we also know that EXP is
3056 greater than or equal to zero. We base the range tests we make
3057 on that fact, so we record it here so we can parse existing
3058 range tests. */
3059 if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
3060 {
3061 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
3062 1, convert (type, integer_zero_node),
3063 NULL_TREE))
3064 break;
3065
3066 in_p = n_in_p, low = n_low, high = n_high;
3067
3068 /* If the high bound is missing, but we
3069 have a low bound, reverse the range so
3070 it goes from zero to the low bound minus 1. */
3071 if (high == 0 && low)
3072 {
3073 in_p = ! in_p;
3074 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3075 integer_one_node, 0);
3076 low = convert (type, integer_zero_node);
3077 }
3078 }
3079 continue;
3080
3081 case NEGATE_EXPR:
3082 /* (-x) IN [a,b] -> x in [-b, -a] */
3083 n_low = range_binop (MINUS_EXPR, type,
3084 convert (type, integer_zero_node), 0, high, 1);
3085 n_high = range_binop (MINUS_EXPR, type,
3086 convert (type, integer_zero_node), 0, low, 0);
3087 low = n_low, high = n_high;
3088 exp = arg0;
3089 continue;
3090
3091 case BIT_NOT_EXPR:
3092 /* ~ X -> -X - 1 */
3093 exp = build (MINUS_EXPR, type, negate_expr (arg0),
3094 convert (type, integer_one_node));
3095 continue;
3096
3097 case PLUS_EXPR: case MINUS_EXPR:
3098 if (TREE_CODE (arg1) != INTEGER_CST)
3099 break;
3100
3101 /* If EXP is signed, any overflow in the computation is undefined,
3102 so we don't worry about it so long as our computations on
3103 the bounds don't overflow. For unsigned, overflow is defined
3104 and this is exactly the right thing. */
3105 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3106 type, low, 0, arg1, 0);
3107 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3108 type, high, 1, arg1, 0);
3109 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3110 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3111 break;
3112
3113 /* Check for an unsigned range which has wrapped around the maximum
3114 value thus making n_high < n_low, and normalize it. */
3115 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3116 {
3117 low = range_binop (PLUS_EXPR, type, n_high, 0,
3118 integer_one_node, 0);
3119 high = range_binop (MINUS_EXPR, type, n_low, 0,
3120 integer_one_node, 0);
3121
3122 /* If the range is of the form +/- [ x+1, x ], we won't
3123 be able to normalize it. But then, it represents the
3124 whole range or the empty set, so make it
3125 +/- [ -, - ]. */
3126 if (tree_int_cst_equal (n_low, low)
3127 && tree_int_cst_equal (n_high, high))
3128 low = high = 0;
3129 else
3130 in_p = ! in_p;
3131 }
3132 else
3133 low = n_low, high = n_high;
3134
3135 exp = arg0;
3136 continue;
3137
3138 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3139 if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
3140 break;
3141
3142 if (! INTEGRAL_TYPE_P (type)
3143 || (low != 0 && ! int_fits_type_p (low, type))
3144 || (high != 0 && ! int_fits_type_p (high, type)))
3145 break;
3146
3147 n_low = low, n_high = high;
3148
3149 if (n_low != 0)
3150 n_low = convert (type, n_low);
3151
3152 if (n_high != 0)
3153 n_high = convert (type, n_high);
3154
3155 /* If we're converting from an unsigned to a signed type,
3156 we will be doing the comparison as unsigned. The tests above
3157 have already verified that LOW and HIGH are both positive.
3158
3159 So we have to make sure that the original unsigned value will
3160 be interpreted as positive. */
3161 if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
3162 {
3163 tree equiv_type = (*lang_hooks.types.type_for_mode)
3164 (TYPE_MODE (type), 1);
3165 tree high_positive;
3166
3167 /* A range without an upper bound is, naturally, unbounded.
3168 Since convert would have cropped a very large value, use
3169 the max value for the destination type. */
3170 high_positive
3171 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3172 : TYPE_MAX_VALUE (type);
3173
3174 if (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (exp)))
3175 high_positive = fold (build (RSHIFT_EXPR, type,
3176 convert (type, high_positive),
3177 convert (type, integer_one_node)));
3178
3179 /* If the low bound is specified, "and" the range with the
3180 range for which the original unsigned value will be
3181 positive. */
3182 if (low != 0)
3183 {
3184 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3185 1, n_low, n_high,
3186 1, convert (type, integer_zero_node),
3187 high_positive))
3188 break;
3189
3190 in_p = (n_in_p == in_p);
3191 }
3192 else
3193 {
3194 /* Otherwise, "or" the range with the range of the input
3195 that will be interpreted as negative. */
3196 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3197 0, n_low, n_high,
3198 1, convert (type, integer_zero_node),
3199 high_positive))
3200 break;
3201
3202 in_p = (in_p != n_in_p);
3203 }
3204 }
3205
3206 exp = arg0;
3207 low = n_low, high = n_high;
3208 continue;
3209
3210 default:
3211 break;
3212 }
3213
3214 break;
3215 }
3216
3217 /* If EXP is a constant, we can evaluate whether this is true or false. */
3218 if (TREE_CODE (exp) == INTEGER_CST)
3219 {
3220 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3221 exp, 0, low, 0))
3222 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3223 exp, 1, high, 1)));
3224 low = high = 0;
3225 exp = 0;
3226 }
3227
3228 *pin_p = in_p, *plow = low, *phigh = high;
3229 return exp;
3230 }
3231 \f
3232 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3233 type, TYPE, return an expression to test if EXP is in (or out of, depending
3234 on IN_P) the range. */
3235
3236 static tree
3237 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3238 {
3239 tree etype = TREE_TYPE (exp);
3240 tree value;
3241
3242 if (! in_p
3243 && (0 != (value = build_range_check (type, exp, 1, low, high))))
3244 return invert_truthvalue (value);
3245
3246 if (low == 0 && high == 0)
3247 return convert (type, integer_one_node);
3248
3249 if (low == 0)
3250 return fold (build (LE_EXPR, type, exp, high));
3251
3252 if (high == 0)
3253 return fold (build (GE_EXPR, type, exp, low));
3254
3255 if (operand_equal_p (low, high, 0))
3256 return fold (build (EQ_EXPR, type, exp, low));
3257
3258 if (integer_zerop (low))
3259 {
3260 if (! TREE_UNSIGNED (etype))
3261 {
3262 etype = (*lang_hooks.types.unsigned_type) (etype);
3263 high = convert (etype, high);
3264 exp = convert (etype, exp);
3265 }
3266 return build_range_check (type, exp, 1, 0, high);
3267 }
3268
3269 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3270 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3271 {
3272 unsigned HOST_WIDE_INT lo;
3273 HOST_WIDE_INT hi;
3274 int prec;
3275
3276 prec = TYPE_PRECISION (etype);
3277 if (prec <= HOST_BITS_PER_WIDE_INT)
3278 {
3279 hi = 0;
3280 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3281 }
3282 else
3283 {
3284 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3285 lo = (unsigned HOST_WIDE_INT) -1;
3286 }
3287
3288 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3289 {
3290 if (TREE_UNSIGNED (etype))
3291 {
3292 etype = (*lang_hooks.types.signed_type) (etype);
3293 exp = convert (etype, exp);
3294 }
3295 return fold (build (GT_EXPR, type, exp,
3296 convert (etype, integer_zero_node)));
3297 }
3298 }
3299
3300 if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3301 && ! TREE_OVERFLOW (value))
3302 return build_range_check (type,
3303 fold (build (MINUS_EXPR, etype, exp, low)),
3304 1, convert (etype, integer_zero_node), value);
3305
3306 return 0;
3307 }
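/* Illustrative sketch, not part of this file, of the (c >= 1) &&
   (c <= 127) special case above: when the high bound is the maximum of
   the corresponding signed type, both comparisons collapse into one
   signed test against zero.  Assumes 8-bit chars and the usual
   two's-complement conversion behavior: */
#include <assert.h>

static void
example_signed_range_check (unsigned char c)
{
  int pair = (c >= 1 && c <= 127);
  int folded = ((signed char) c > 0);

  assert (pair == folded);
}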
3308 \f
3309 /* Given two ranges, see if we can merge them into one. Return 1 if we
3310 can, 0 if we can't. Set the output range into the specified parameters. */
3311
3312 static int
3313 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3314 tree high0, int in1_p, tree low1, tree high1)
3315 {
3316 int no_overlap;
3317 int subset;
3318 int temp;
3319 tree tem;
3320 int in_p;
3321 tree low, high;
3322 int lowequal = ((low0 == 0 && low1 == 0)
3323 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3324 low0, 0, low1, 0)));
3325 int highequal = ((high0 == 0 && high1 == 0)
3326 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3327 high0, 1, high1, 1)));
3328
3329 /* Make range 0 be the range that starts first, or ends last if they
3330 start at the same value. Swap them if it isn't. */
3331 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3332 low0, 0, low1, 0))
3333 || (lowequal
3334 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3335 high1, 1, high0, 1))))
3336 {
3337 temp = in0_p, in0_p = in1_p, in1_p = temp;
3338 tem = low0, low0 = low1, low1 = tem;
3339 tem = high0, high0 = high1, high1 = tem;
3340 }
3341
3342 /* Now flag two cases, whether the ranges are disjoint or whether the
3343 second range is totally subsumed in the first. Note that the tests
3344 below are simplified by the ones above. */
3345 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3346 high0, 1, low1, 0));
3347 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3348 high1, 1, high0, 1));
3349
3350 /* We now have four cases, depending on whether we are including or
3351 excluding the two ranges. */
3352 if (in0_p && in1_p)
3353 {
3354 /* If they don't overlap, the result is false. If the second range
3355 is a subset it is the result. Otherwise, the range is from the start
3356 of the second to the end of the first. */
3357 if (no_overlap)
3358 in_p = 0, low = high = 0;
3359 else if (subset)
3360 in_p = 1, low = low1, high = high1;
3361 else
3362 in_p = 1, low = low1, high = high0;
3363 }
3364
3365 else if (in0_p && ! in1_p)
3366 {
3367 /* If they don't overlap, the result is the first range. If they are
3368 equal, the result is false. If the second range is a subset of the
3369 first, and the ranges begin at the same place, we go from just after
3370 the end of the first range to the end of the second. If the second
3371 range is not a subset of the first, or if it is a subset and both
3372 ranges end at the same place, the range starts at the start of the
3373 first range and ends just before the second range.
3374 Otherwise, we can't describe this as a single range. */
3375 if (no_overlap)
3376 in_p = 1, low = low0, high = high0;
3377 else if (lowequal && highequal)
3378 in_p = 0, low = high = 0;
3379 else if (subset && lowequal)
3380 {
3381 in_p = 1, high = high0;
3382 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3383 integer_one_node, 0);
3384 }
3385 else if (! subset || highequal)
3386 {
3387 in_p = 1, low = low0;
3388 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3389 integer_one_node, 0);
3390 }
3391 else
3392 return 0;
3393 }
3394
3395 else if (! in0_p && in1_p)
3396 {
3397 /* If they don't overlap, the result is the second range. If the second
3398 is a subset of the first, the result is false. Otherwise,
3399 the range starts just after the first range and ends at the
3400 end of the second. */
3401 if (no_overlap)
3402 in_p = 1, low = low1, high = high1;
3403 else if (subset || highequal)
3404 in_p = 0, low = high = 0;
3405 else
3406 {
3407 in_p = 1, high = high1;
3408 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3409 integer_one_node, 0);
3410 }
3411 }
3412
3413 else
3414 {
3415 /* The case where we are excluding both ranges. Here the complex case
3416 is if they don't overlap. In that case, the only time we have a
3417 range is if they are adjacent. If the second is a subset of the
3418 first, the result is the first. Otherwise, the range to exclude
3419 starts at the beginning of the first range and ends at the end of the
3420 second. */
3421 if (no_overlap)
3422 {
3423 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3424 range_binop (PLUS_EXPR, NULL_TREE,
3425 high0, 1,
3426 integer_one_node, 1),
3427 1, low1, 0)))
3428 in_p = 0, low = low0, high = high1;
3429 else
3430 return 0;
3431 }
3432 else if (subset)
3433 in_p = 0, low = low0, high = high0;
3434 else
3435 in_p = 0, low = low0, high = high1;
3436 }
3437
3438 *pin_p = in_p, *plow = low, *phigh = high;
3439 return 1;
3440 }
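/* Illustrative sketch, not part of this file, of the simplest case
   above (in0_p && in1_p, overlapping, neither a subset): the merged
   range runs from the start of the second range to the end of the
   first, i.e. their intersection: */
#include <assert.h>

static void
example_merge_ranges (int x)
{
  int both = (x >= 2 && x <= 10) && (x >= 5 && x <= 15);
  int merged = (x >= 5 && x <= 10);

  assert (both == merged);
}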
3441 \f
3442 #ifndef RANGE_TEST_NON_SHORT_CIRCUIT
3443 #define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
3444 #endif
3445
3446 /* EXP is some logical combination of boolean tests. See if we can
3447 merge it into some range test. Return the new tree if so. */
3448
3449 static tree
3450 fold_range_test (tree exp)
3451 {
3452 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3453 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3454 int in0_p, in1_p, in_p;
3455 tree low0, low1, low, high0, high1, high;
3456 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3457 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3458 tree tem;
3459
3460 /* If this is an OR operation, invert both sides; we will invert
3461 again at the end. */
3462 if (or_op)
3463 in0_p = ! in0_p, in1_p = ! in1_p;
3464
3465 /* If both expressions are the same, if we can merge the ranges, and we
3466 can build the range test, return it or it inverted. If one of the
3467 ranges is always true or always false, consider it to be the same
3468 expression as the other. */
3469 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3470 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3471 in1_p, low1, high1)
3472 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3473 lhs != 0 ? lhs
3474 : rhs != 0 ? rhs : integer_zero_node,
3475 in_p, low, high))))
3476 return or_op ? invert_truthvalue (tem) : tem;
3477
3478 /* On machines where the branch cost is expensive, if this is a
3479 short-circuited branch and the underlying object on both sides
3480 is the same, make a non-short-circuit operation. */
3481 else if (RANGE_TEST_NON_SHORT_CIRCUIT
3482 && lhs != 0 && rhs != 0
3483 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3484 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3485 && operand_equal_p (lhs, rhs, 0))
3486 {
3487 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3488 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
3489 which cases we can't do this. */
3490 if (simple_operand_p (lhs))
3491 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3492 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3493 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3494 TREE_OPERAND (exp, 1));
3495
3496 else if ((*lang_hooks.decls.global_bindings_p) () == 0
3497 && ! CONTAINS_PLACEHOLDER_P (lhs))
3498 {
3499 tree common = save_expr (lhs);
3500
3501 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3502 or_op ? ! in0_p : in0_p,
3503 low0, high0))
3504 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3505 or_op ? ! in1_p : in1_p,
3506 low1, high1))))
3507 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3508 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3509 TREE_TYPE (exp), lhs, rhs);
3510 }
3511 }
3512
3513 return 0;
3514 }
3515 \f
3516 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3517 bit value. Arrange things so the extra bits will be set to zero if and
3518 only if C is sign-extended to its full width. If MASK is nonzero,
3519 it is an INTEGER_CST that should be AND'ed with the extra bits. */
3520
3521 static tree
3522 unextend (tree c, int p, int unsignedp, tree mask)
3523 {
3524 tree type = TREE_TYPE (c);
3525 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3526 tree temp;
3527
3528 if (p == modesize || unsignedp)
3529 return c;
3530
3531 /* We work by getting just the sign bit into the low-order bit, then
3532 into the high-order bit, then sign-extend. We then XOR that value
3533 with C. */
3534 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3535 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3536
3537 /* We must use a signed type in order to get an arithmetic right shift.
3538 However, we must also avoid introducing accidental overflows, so that
3539 a subsequent call to integer_zerop will work. Hence we must
3540 do the type conversion here. At this point, the constant is either
3541 zero or one, and the conversion to a signed type can never overflow.
3542 We could get an overflow if this conversion is done anywhere else. */
3543 if (TREE_UNSIGNED (type))
3544 temp = convert ((*lang_hooks.types.signed_type) (type), temp);
3545
3546 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3547 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3548 if (mask != 0)
3549 temp = const_binop (BIT_AND_EXPR, temp, convert (TREE_TYPE (c), mask), 0);
3550 /* If necessary, convert the type back to match the type of C. */
3551 if (TREE_UNSIGNED (type))
3552 temp = convert (type, temp);
3553
3554 return convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
3555 }
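
/* Illustrative sketch only, not compiler code: the sign-extension
   game unextend plays, applied to a plain integer.  Extending a P-bit
   value C to full width can be done with an XOR and a subtract of the
   sign-bit mask, which is what the shift sequence above computes a
   piece at a time.  Assumes 1 <= P <= the width of int.  */

static int
example_sign_extend (unsigned int c, int p)
{
  unsigned int m = 1u << (p - 1);  /* mask selecting the sign bit */
  /* If the sign bit is clear, the XOR adds M and the subtraction
     removes it again; if set, the subtraction borrows into all the
     high-order bits.  */
  return (int) ((c ^ m) - m);
}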
3556 \f
3557 /* Find ways of folding logical expressions of LHS and RHS:
3558 Try to merge two comparisons to the same innermost item.
3559 Look for range tests like "ch >= '0' && ch <= '9'".
3560 Look for combinations of simple terms on machines with expensive branches
3561 and evaluate the RHS unconditionally.
3562
3563 For example, if we have p->a == 2 && p->b == 4 and we can make an
3564 object large enough to span both A and B, we can do this with a comparison
3565 against the object ANDed with a mask.
3566
3567 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3568 operations to do this with one comparison.
3569
3570 We check for both normal comparisons and the BIT_AND_EXPRs made by this
3571 function and the one above.
3572
3573 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3574 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3575
3576 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
3577 two operands.
3578
3579 We return the simplified tree or 0 if no optimization is possible. */
3580
3581 static tree
3582 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
3583 {
3584 /* If this is the "or" of two comparisons, we can do something if
3585 the comparisons are NE_EXPR. If this is the "and", we can do something
3586 if the comparisons are EQ_EXPR. I.e.,
3587 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3588
3589 WANTED_CODE is this operation code. For single bit fields, we can
3590 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3591 comparison for one-bit fields. */
3592
3593 enum tree_code wanted_code;
3594 enum tree_code lcode, rcode;
3595 tree ll_arg, lr_arg, rl_arg, rr_arg;
3596 tree ll_inner, lr_inner, rl_inner, rr_inner;
3597 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3598 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3599 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3600 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3601 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3602 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3603 enum machine_mode lnmode, rnmode;
3604 tree ll_mask, lr_mask, rl_mask, rr_mask;
3605 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3606 tree l_const, r_const;
3607 tree lntype, rntype, result;
3608 int first_bit, end_bit;
3609 int volatilep;
3610
3611 /* Start by getting the comparison codes. Fail if anything is volatile.
3612 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3613 it were surrounded with a NE_EXPR. */
3614
3615 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3616 return 0;
3617
3618 lcode = TREE_CODE (lhs);
3619 rcode = TREE_CODE (rhs);
3620
3621 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3622 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3623
3624 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3625 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3626
3627 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3628 return 0;
3629
3630 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3631 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3632
3633 ll_arg = TREE_OPERAND (lhs, 0);
3634 lr_arg = TREE_OPERAND (lhs, 1);
3635 rl_arg = TREE_OPERAND (rhs, 0);
3636 rr_arg = TREE_OPERAND (rhs, 1);
3637
3638 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
3639 if (simple_operand_p (ll_arg)
3640 && simple_operand_p (lr_arg)
3641 && !FLOAT_TYPE_P (TREE_TYPE (ll_arg)))
3642 {
3643 int compcode;
3644
3645 if (operand_equal_p (ll_arg, rl_arg, 0)
3646 && operand_equal_p (lr_arg, rr_arg, 0))
3647 {
3648 int lcompcode, rcompcode;
3649
3650 lcompcode = comparison_to_compcode (lcode);
3651 rcompcode = comparison_to_compcode (rcode);
3652 compcode = (code == TRUTH_AND_EXPR)
3653 ? lcompcode & rcompcode
3654 : lcompcode | rcompcode;
3655 }
3656 else if (operand_equal_p (ll_arg, rr_arg, 0)
3657 && operand_equal_p (lr_arg, rl_arg, 0))
3658 {
3659 int lcompcode, rcompcode;
3660
3661 rcode = swap_tree_comparison (rcode);
3662 lcompcode = comparison_to_compcode (lcode);
3663 rcompcode = comparison_to_compcode (rcode);
3664 compcode = (code == TRUTH_AND_EXPR)
3665 ? lcompcode & rcompcode
3666 : lcompcode | rcompcode;
3667 }
3668 else
3669 compcode = -1;
3670
3671 if (compcode == COMPCODE_TRUE)
3672 return convert (truth_type, integer_one_node);
3673 else if (compcode == COMPCODE_FALSE)
3674 return convert (truth_type, integer_zero_node);
3675 else if (compcode != -1)
3676 return build (compcode_to_comparison (compcode),
3677 truth_type, ll_arg, lr_arg);
3678 }
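
#if 0
  /* Illustrative sketch only, excluded from compilation: the compcode
     encoding used above, with hypothetical names.  A comparison of the
     same two operands is a 3-bit mask over {LT, EQ, GT}; "&&" of two
     such comparisons is then bitwise AND of the masks and "||" is
     bitwise OR.  */
  #define EX_LT 1  /* set when a <  b */
  #define EX_EQ 2  /* set when a == b */
  #define EX_GT 4  /* set when a >  b */

  static int
  example_compcode_holds (int mask, int a, int b)
  {
    int state = a < b ? EX_LT : a == b ? EX_EQ : EX_GT;
    return (mask & state) != 0;
  }
  /* E.g. (a < b) && (a == b) has mask EX_LT & EX_EQ == 0, so it is
     always false, while (a < b) || (a == b) has mask EX_LT | EX_EQ,
     which decodes back to a <= b.  */
#endif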
3679
3680 /* If the RHS can be evaluated unconditionally and its operands are
3681 simple, it wins to evaluate the RHS unconditionally on machines
3682 with expensive branches. In this case, this isn't a comparison
3683 that can be merged. Avoid doing this if the RHS is a floating-point
3684 comparison since those can trap. */
3685
3686 if (BRANCH_COST >= 2
3687 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
3688 && simple_operand_p (rl_arg)
3689 && simple_operand_p (rr_arg))
3690 {
3691 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
3692 if (code == TRUTH_OR_EXPR
3693 && lcode == NE_EXPR && integer_zerop (lr_arg)
3694 && rcode == NE_EXPR && integer_zerop (rr_arg)
3695 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3696 return build (NE_EXPR, truth_type,
3697 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3698 ll_arg, rl_arg),
3699 integer_zero_node);
3700
3701 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
3702 if (code == TRUTH_AND_EXPR
3703 && lcode == EQ_EXPR && integer_zerop (lr_arg)
3704 && rcode == EQ_EXPR && integer_zerop (rr_arg)
3705 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3706 return build (EQ_EXPR, truth_type,
3707 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3708 ll_arg, rl_arg),
3709 integer_zero_node);
3710
3711 return build (code, truth_type, lhs, rhs);
3712 }
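
#if 0
  /* Illustrative sketch only, excluded from compilation: the two
     rewrites above as plain C identities; the right-hand forms are
     branch-free.  */
  static int
  example_or_nonzero (int a, int b)
  {
    return (a | b) != 0;   /* same value as (a != 0) || (b != 0) */
  }

  static int
  example_and_zero (int a, int b)
  {
    return (a | b) == 0;   /* same value as (a == 0) && (b == 0) */
  }
#endif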
3713
3714 /* See if the comparisons can be merged. Then get all the parameters for
3715 each side. */
3716
3717 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
3718 || (rcode != EQ_EXPR && rcode != NE_EXPR))
3719 return 0;
3720
3721 volatilep = 0;
3722 ll_inner = decode_field_reference (ll_arg,
3723 &ll_bitsize, &ll_bitpos, &ll_mode,
3724 &ll_unsignedp, &volatilep, &ll_mask,
3725 &ll_and_mask);
3726 lr_inner = decode_field_reference (lr_arg,
3727 &lr_bitsize, &lr_bitpos, &lr_mode,
3728 &lr_unsignedp, &volatilep, &lr_mask,
3729 &lr_and_mask);
3730 rl_inner = decode_field_reference (rl_arg,
3731 &rl_bitsize, &rl_bitpos, &rl_mode,
3732 &rl_unsignedp, &volatilep, &rl_mask,
3733 &rl_and_mask);
3734 rr_inner = decode_field_reference (rr_arg,
3735 &rr_bitsize, &rr_bitpos, &rr_mode,
3736 &rr_unsignedp, &volatilep, &rr_mask,
3737 &rr_and_mask);
3738
3739 /* The inner operation on the lhs of each comparison must be the
3740 same if we are to be able to do anything.
3741 Then see if we have constants. If not, the same must be true for
3742 the rhs's. */
3743 if (volatilep || ll_inner == 0 || rl_inner == 0
3744 || ! operand_equal_p (ll_inner, rl_inner, 0))
3745 return 0;
3746
3747 if (TREE_CODE (lr_arg) == INTEGER_CST
3748 && TREE_CODE (rr_arg) == INTEGER_CST)
3749 l_const = lr_arg, r_const = rr_arg;
3750 else if (lr_inner == 0 || rr_inner == 0
3751 || ! operand_equal_p (lr_inner, rr_inner, 0))
3752 return 0;
3753 else
3754 l_const = r_const = 0;
3755
3756 /* If either comparison code is not correct for our logical operation,
3757 fail. However, we can convert a one-bit comparison against zero into
3758 the opposite comparison against that bit being set in the field. */
3759
3760 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
3761 if (lcode != wanted_code)
3762 {
3763 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
3764 {
3765 /* Make the left operand unsigned, since we are only interested
3766 in the value of one bit. Otherwise we are doing the wrong
3767 thing below. */
3768 ll_unsignedp = 1;
3769 l_const = ll_mask;
3770 }
3771 else
3772 return 0;
3773 }
3774
3775 /* This is analogous to the code for l_const above. */
3776 if (rcode != wanted_code)
3777 {
3778 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
3779 {
3780 rl_unsignedp = 1;
3781 r_const = rl_mask;
3782 }
3783 else
3784 return 0;
3785 }
3786
3787 /* After this point all optimizations will generate bit-field
3788 references, which we might not want. */
3789 if (! (*lang_hooks.can_use_bit_fields_p) ())
3790 return 0;
3791
3792 /* See if we can find a mode that contains both fields being compared on
3793 the left. If we can't, fail. Otherwise, update all constants and masks
3794 to be relative to a field of that size. */
3795 first_bit = MIN (ll_bitpos, rl_bitpos);
3796 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
3797 lnmode = get_best_mode (end_bit - first_bit, first_bit,
3798 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
3799 volatilep);
3800 if (lnmode == VOIDmode)
3801 return 0;
3802
3803 lnbitsize = GET_MODE_BITSIZE (lnmode);
3804 lnbitpos = first_bit & ~ (lnbitsize - 1);
3805 lntype = (*lang_hooks.types.type_for_size) (lnbitsize, 1);
3806 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
3807
3808 if (BYTES_BIG_ENDIAN)
3809 {
3810 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
3811 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
3812 }
3813
3814 ll_mask = const_binop (LSHIFT_EXPR, convert (lntype, ll_mask),
3815 size_int (xll_bitpos), 0);
3816 rl_mask = const_binop (LSHIFT_EXPR, convert (lntype, rl_mask),
3817 size_int (xrl_bitpos), 0);
3818
3819 if (l_const)
3820 {
3821 l_const = convert (lntype, l_const);
3822 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
3823 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
3824 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
3825 fold (build1 (BIT_NOT_EXPR,
3826 lntype, ll_mask)),
3827 0)))
3828 {
3829 warning ("comparison is always %d", wanted_code == NE_EXPR);
3830
3831 return convert (truth_type,
3832 wanted_code == NE_EXPR
3833 ? integer_one_node : integer_zero_node);
3834 }
3835 }
3836 if (r_const)
3837 {
3838 r_const = convert (lntype, r_const);
3839 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
3840 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
3841 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
3842 fold (build1 (BIT_NOT_EXPR,
3843 lntype, rl_mask)),
3844 0)))
3845 {
3846 warning ("comparison is always %d", wanted_code == NE_EXPR);
3847
3848 return convert (truth_type,
3849 wanted_code == NE_EXPR
3850 ? integer_one_node : integer_zero_node);
3851 }
3852 }
3853
3854 /* If the right sides are not constant, do the same for them. Also,
3855 disallow this optimization if a size or signedness mismatch occurs
3856 between the left and right sides. */
3857 if (l_const == 0)
3858 {
3859 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
3860 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
3861 /* Make sure the two fields on the right
3862 correspond to the left without being swapped. */
3863 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
3864 return 0;
3865
3866 first_bit = MIN (lr_bitpos, rr_bitpos);
3867 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
3868 rnmode = get_best_mode (end_bit - first_bit, first_bit,
3869 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
3870 volatilep);
3871 if (rnmode == VOIDmode)
3872 return 0;
3873
3874 rnbitsize = GET_MODE_BITSIZE (rnmode);
3875 rnbitpos = first_bit & ~ (rnbitsize - 1);
3876 rntype = (*lang_hooks.types.type_for_size) (rnbitsize, 1);
3877 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
3878
3879 if (BYTES_BIG_ENDIAN)
3880 {
3881 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
3882 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
3883 }
3884
3885 lr_mask = const_binop (LSHIFT_EXPR, convert (rntype, lr_mask),
3886 size_int (xlr_bitpos), 0);
3887 rr_mask = const_binop (LSHIFT_EXPR, convert (rntype, rr_mask),
3888 size_int (xrr_bitpos), 0);
3889
3890 /* Make a mask that corresponds to both fields being compared.
3891 Do this for both items being compared. If the operands are the
3892 same size and the bits being compared are in the same position
3893 then we can do this by masking both and comparing the masked
3894 results. */
3895 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3896 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
3897 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
3898 {
3899 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
3900 ll_unsignedp || rl_unsignedp);
3901 if (! all_ones_mask_p (ll_mask, lnbitsize))
3902 lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
3903
3904 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
3905 lr_unsignedp || rr_unsignedp);
3906 if (! all_ones_mask_p (lr_mask, rnbitsize))
3907 rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
3908
3909 return build (wanted_code, truth_type, lhs, rhs);
3910 }
3911
3912 /* There is still another way we can do something: If both pairs of
3913 fields being compared are adjacent, we may be able to make a wider
3914 field containing them both.
3915
3916 Note that we still must mask the lhs/rhs expressions. Furthermore,
3917 the mask must be shifted to account for the shift done by
3918 make_bit_field_ref. */
3919 if ((ll_bitsize + ll_bitpos == rl_bitpos
3920 && lr_bitsize + lr_bitpos == rr_bitpos)
3921 || (ll_bitpos == rl_bitpos + rl_bitsize
3922 && lr_bitpos == rr_bitpos + rr_bitsize))
3923 {
3924 tree type;
3925
3926 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
3927 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
3928 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
3929 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
3930
3931 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
3932 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
3933 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
3934 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
3935
3936 /* Convert to the smaller type before masking out unwanted bits. */
3937 type = lntype;
3938 if (lntype != rntype)
3939 {
3940 if (lnbitsize > rnbitsize)
3941 {
3942 lhs = convert (rntype, lhs);
3943 ll_mask = convert (rntype, ll_mask);
3944 type = rntype;
3945 }
3946 else if (lnbitsize < rnbitsize)
3947 {
3948 rhs = convert (lntype, rhs);
3949 lr_mask = convert (lntype, lr_mask);
3950 type = lntype;
3951 }
3952 }
3953
3954 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
3955 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
3956
3957 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
3958 rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
3959
3960 return build (wanted_code, truth_type, lhs, rhs);
3961 }
3962
3963 return 0;
3964 }
3965
3966 /* Handle the case of comparisons with constants. If there is something in
3967 common between the masks, those bits of the constants must be the same.
3968 If not, the condition is always false. Test for this to avoid generating
3969 incorrect code below. */
3970 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
3971 if (! integer_zerop (result)
3972 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
3973 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
3974 {
3975 if (wanted_code == NE_EXPR)
3976 {
3977 warning ("`or' of unmatched not-equal tests is always 1");
3978 return convert (truth_type, integer_one_node);
3979 }
3980 else
3981 {
3982 warning ("`and' of mutually exclusive equal-tests is always 0");
3983 return convert (truth_type, integer_zero_node);
3984 }
3985 }
3986
3987 /* Construct the expression we will return. First get the component
3988 reference we will make. Unless the mask is all ones the width of
3989 that field, perform the mask operation. Then compare with the
3990 merged constant. */
3991 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
3992 ll_unsignedp || rl_unsignedp);
3993
3994 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3995 if (! all_ones_mask_p (ll_mask, lnbitsize))
3996 result = build (BIT_AND_EXPR, lntype, result, ll_mask);
3997
3998 return build (wanted_code, truth_type, result,
3999 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4000 }
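
/* Illustrative sketch only, not compiler code: the effect of the merge
   above on two adjacent 8-bit fields packed into one word (field A in
   bits 0-7, field B in bits 8-15; this layout belongs to the example,
   it is not something fold_truthop assumes).  */

static int
example_merged_compare (unsigned int word)
{
  /* ((word & 0xff) == 2) && (((word >> 8) & 0xff) == 4) merges into
     a single masked comparison against the combined constant.  */
  return (word & 0xffffu) == 0x0402u;
}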
4001 \f
4002 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4003 constant. */
4004
4005 static tree
4006 optimize_minmax_comparison (tree t)
4007 {
4008 tree type = TREE_TYPE (t);
4009 tree arg0 = TREE_OPERAND (t, 0);
4010 enum tree_code op_code;
4011 tree comp_const = TREE_OPERAND (t, 1);
4012 tree minmax_const;
4013 int consts_equal, consts_lt;
4014 tree inner;
4015
4016 STRIP_SIGN_NOPS (arg0);
4017
4018 op_code = TREE_CODE (arg0);
4019 minmax_const = TREE_OPERAND (arg0, 1);
4020 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4021 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4022 inner = TREE_OPERAND (arg0, 0);
4023
4024 /* If something does not permit us to optimize, return the original tree. */
4025 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4026 || TREE_CODE (comp_const) != INTEGER_CST
4027 || TREE_CONSTANT_OVERFLOW (comp_const)
4028 || TREE_CODE (minmax_const) != INTEGER_CST
4029 || TREE_CONSTANT_OVERFLOW (minmax_const))
4030 return t;
4031
4032 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4033 and GT_EXPR, doing the rest with recursive calls using logical
4034 simplifications. */
4035 switch (TREE_CODE (t))
4036 {
4037 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4038 return
4039 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4040
4041 case GE_EXPR:
4042 return
4043 fold (build (TRUTH_ORIF_EXPR, type,
4044 optimize_minmax_comparison
4045 (build (EQ_EXPR, type, arg0, comp_const)),
4046 optimize_minmax_comparison
4047 (build (GT_EXPR, type, arg0, comp_const))));
4048
4049 case EQ_EXPR:
4050 if (op_code == MAX_EXPR && consts_equal)
4051 /* MAX (X, 0) == 0 -> X <= 0 */
4052 return fold (build (LE_EXPR, type, inner, comp_const));
4053
4054 else if (op_code == MAX_EXPR && consts_lt)
4055 /* MAX (X, 0) == 5 -> X == 5 */
4056 return fold (build (EQ_EXPR, type, inner, comp_const));
4057
4058 else if (op_code == MAX_EXPR)
4059 /* MAX (X, 0) == -1 -> false */
4060 return omit_one_operand (type, integer_zero_node, inner);
4061
4062 else if (consts_equal)
4063 /* MIN (X, 0) == 0 -> X >= 0 */
4064 return fold (build (GE_EXPR, type, inner, comp_const));
4065
4066 else if (consts_lt)
4067 /* MIN (X, 0) == 5 -> false */
4068 return omit_one_operand (type, integer_zero_node, inner);
4069
4070 else
4071 /* MIN (X, 0) == -1 -> X == -1 */
4072 return fold (build (EQ_EXPR, type, inner, comp_const));
4073
4074 case GT_EXPR:
4075 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4076 /* MAX (X, 0) > 0 -> X > 0
4077 MAX (X, 0) > 5 -> X > 5 */
4078 return fold (build (GT_EXPR, type, inner, comp_const));
4079
4080 else if (op_code == MAX_EXPR)
4081 /* MAX (X, 0) > -1 -> true */
4082 return omit_one_operand (type, integer_one_node, inner);
4083
4084 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4085 /* MIN (X, 0) > 0 -> false
4086 MIN (X, 0) > 5 -> false */
4087 return omit_one_operand (type, integer_zero_node, inner);
4088
4089 else
4090 /* MIN (X, 0) > -1 -> X > -1 */
4091 return fold (build (GT_EXPR, type, inner, comp_const));
4092
4093 default:
4094 return t;
4095 }
4096 }
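
/* Illustrative sketch only, not compiler code: spot-checking one of
   the identities above.  MAX (X, 0) == 0 holds exactly when X <= 0,
   so this hypothetical function returns 1 for every input.  */

static int
example_minmax_identity (int x)
{
  int max = x > 0 ? x : 0;  /* MAX (x, 0) */
  return (max == 0) == (x <= 0);
}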
4097 \f
4098 /* T is an integer expression that is being multiplied or divided by, or
4099 reduced modulo, a constant C (CODE says which operation, and what kind
4100 of divide or modulus). See if we can eliminate that operation by folding
4101 it with other operations already in T. WIDE_TYPE, if non-null, is a type
4102 that should be used for the computation if wider than our type.
4103
4104 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4105 (X * 2) + (Y * 4). We must, however, be assured that either the original
4106 expression would not overflow or that overflow is undefined for the type
4107 in the language in question.
4108
4109 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4110 the machine has a multiply-accumulate insn or that this is part of an
4111 addressing calculation.
4112
4113 If we return a non-null expression, it is an equivalent form of the
4114 original computation, but need not be in the original type. */
4115
4116 static tree
4117 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
4118 {
4119 /* To avoid exponential search depth, refuse to allow recursion past
4120 three levels. Beyond that (1) it's highly unlikely that we'll find
4121 something interesting and (2) we've probably processed it before
4122 when we built the inner expression. */
4123
4124 static int depth;
4125 tree ret;
4126
4127 if (depth > 3)
4128 return NULL;
4129
4130 depth++;
4131 ret = extract_muldiv_1 (t, c, code, wide_type);
4132 depth--;
4133
4134 return ret;
4135 }
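
/* Illustrative sketch only, not compiler code: the headline
   transformation from the comment above.  Provided the original
   expression does not overflow, dividing (X * 8) + (Y * 16) by 4
   distributes onto each term.  */

static long
example_extract_muldiv (long x, long y)
{
  return x * 2 + y * 4;  /* == (x * 8 + y * 16) / 4, overflow aside */
}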
4136
4137 static tree
4138 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
4139 {
4140 tree type = TREE_TYPE (t);
4141 enum tree_code tcode = TREE_CODE (t);
4142 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
4143 > GET_MODE_SIZE (TYPE_MODE (type)))
4144 ? wide_type : type);
4145 tree t1, t2;
4146 int same_p = tcode == code;
4147 tree op0 = NULL_TREE, op1 = NULL_TREE;
4148
4149 /* Don't deal with constants of zero here; they confuse the code below. */
4150 if (integer_zerop (c))
4151 return NULL_TREE;
4152
4153 if (TREE_CODE_CLASS (tcode) == '1')
4154 op0 = TREE_OPERAND (t, 0);
4155
4156 if (TREE_CODE_CLASS (tcode) == '2')
4157 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
4158
4159 /* Note that we need not handle conditional operations here since fold
4160 already handles those cases. So just do arithmetic here. */
4161 switch (tcode)
4162 {
4163 case INTEGER_CST:
4164 /* For a constant, we can always simplify if we are a multiply
4165 or (for divide and modulus) if it is a multiple of our constant. */
4166 if (code == MULT_EXPR
4167 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
4168 return const_binop (code, convert (ctype, t), convert (ctype, c), 0);
4169 break;
4170
4171 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
4172 /* If op0 is an expression ... */
4173 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
4174 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
4175 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
4176 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
4177 /* ... and is unsigned, and its type is smaller than ctype,
4178 then we cannot pass through this widening. */
4179 && ((TREE_UNSIGNED (TREE_TYPE (op0))
4180 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4181 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
4182 && (GET_MODE_SIZE (TYPE_MODE (ctype))
4183 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
4184 /* ... or its type is larger than ctype,
4185 then we cannot pass through this truncation. */
4186 || (GET_MODE_SIZE (TYPE_MODE (ctype))
4187 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
4188 /* ... or signedness changes for division or modulus,
4189 then we cannot pass through this conversion. */
4190 || (code != MULT_EXPR
4191 && (TREE_UNSIGNED (ctype)
4192 != TREE_UNSIGNED (TREE_TYPE (op0))))))
4193 break;
4194
4195 /* Pass the constant down and see if we can make a simplification. If
4196 we can, replace this expression with the inner simplification for
4197 possible later conversion to our or some other type. */
4198 if ((t2 = convert (TREE_TYPE (op0), c)) != 0
4199 && TREE_CODE (t2) == INTEGER_CST
4200 && ! TREE_CONSTANT_OVERFLOW (t2)
4201 && (0 != (t1 = extract_muldiv (op0, t2, code,
4202 code == MULT_EXPR
4203 ? ctype : NULL_TREE))))
4204 return t1;
4205 break;
4206
4207 case NEGATE_EXPR: case ABS_EXPR:
4208 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4209 return fold (build1 (tcode, ctype, convert (ctype, t1)));
4210 break;
4211
4212 case MIN_EXPR: case MAX_EXPR:
4213 /* If widening the type changes the signedness, then we can't perform
4214 this optimization as that changes the result. */
4215 if (TREE_UNSIGNED (ctype) != TREE_UNSIGNED (type))
4216 break;
4217
4218 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
4219 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
4220 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
4221 {
4222 if (tree_int_cst_sgn (c) < 0)
4223 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
4224
4225 return fold (build (tcode, ctype, convert (ctype, t1),
4226 convert (ctype, t2)));
4227 }
4228 break;
4229
4230 case WITH_RECORD_EXPR:
4231 if ((t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code, wide_type)) != 0)
4232 return build (WITH_RECORD_EXPR, TREE_TYPE (t1), t1,
4233 TREE_OPERAND (t, 1));
4234 break;
4235
4236 case LSHIFT_EXPR: case RSHIFT_EXPR:
4237 /* If the second operand is constant, this is a multiplication
4238 or floor division by a power of two, so we can treat it that
4239 way unless the multiplier or divisor overflows. */
4240 if (TREE_CODE (op1) == INTEGER_CST
4241 /* const_binop may not detect overflow correctly,
4242 so check for it explicitly here. */
4243 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
4244 && TREE_INT_CST_HIGH (op1) == 0
4245 && 0 != (t1 = convert (ctype,
4246 const_binop (LSHIFT_EXPR, size_one_node,
4247 op1, 0)))
4248 && ! TREE_OVERFLOW (t1))
4249 return extract_muldiv (build (tcode == LSHIFT_EXPR
4250 ? MULT_EXPR : FLOOR_DIV_EXPR,
4251 ctype, convert (ctype, op0), t1),
4252 c, code, wide_type);
4253 break;
4254
4255 case PLUS_EXPR: case MINUS_EXPR:
4256 /* See if we can eliminate the operation on both sides. If we can, we
4257 can return a new PLUS or MINUS. If we can't, the only remaining
4258 cases where we can do anything are if the second operand is a
4259 constant. */
4260 t1 = extract_muldiv (op0, c, code, wide_type);
4261 t2 = extract_muldiv (op1, c, code, wide_type);
4262 if (t1 != 0 && t2 != 0
4263 && (code == MULT_EXPR
4264 /* If not multiplication, we can only do this if both operands
4265 are divisible by c. */
4266 || (multiple_of_p (ctype, op0, c)
4267 && multiple_of_p (ctype, op1, c))))
4268 return fold (build (tcode, ctype, convert (ctype, t1),
4269 convert (ctype, t2)));
4270
4271 /* If this was a subtraction, negate OP1 and set it to be an addition.
4272 This simplifies the logic below. */
4273 if (tcode == MINUS_EXPR)
4274 tcode = PLUS_EXPR, op1 = negate_expr (op1);
4275
4276 if (TREE_CODE (op1) != INTEGER_CST)
4277 break;
4278
4279 /* If either OP1 or C are negative, this optimization is not safe for
4280 some of the division and remainder types while for others we need
4281 to change the code. */
4282 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
4283 {
4284 if (code == CEIL_DIV_EXPR)
4285 code = FLOOR_DIV_EXPR;
4286 else if (code == FLOOR_DIV_EXPR)
4287 code = CEIL_DIV_EXPR;
4288 else if (code != MULT_EXPR
4289 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
4290 break;
4291 }
4292
4293 /* If it's a multiply or a division/modulus operation of a multiple
4294 of our constant, do the operation and verify it doesn't overflow. */
4295 if (code == MULT_EXPR
4296 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4297 {
4298 op1 = const_binop (code, convert (ctype, op1), convert (ctype, c), 0);
4299 if (op1 == 0 || TREE_OVERFLOW (op1))
4300 break;
4301 }
4302 else
4303 break;
4304
4305 /* If we have an unsigned type that is not a sizetype, we cannot widen
4306 the operation since it will change the result if the original
4307 computation overflowed. */
4308 if (TREE_UNSIGNED (ctype)
4309 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
4310 && ctype != type)
4311 break;
4312
4313 /* If we were able to eliminate our operation from the first side,
4314 apply our operation to the second side and reform the PLUS. */
4315 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4316 return fold (build (tcode, ctype, convert (ctype, t1), op1));
4317
4318 /* The last case is if we are a multiply. In that case, we can
4319 apply the distributive law to commute the multiply and addition
4320 if the multiplication of the constants doesn't overflow. */
4321 if (code == MULT_EXPR)
4322 return fold (build (tcode, ctype, fold (build (code, ctype,
4323 convert (ctype, op0),
4324 convert (ctype, c))),
4325 op1));
4326
4327 break;
4328
4329 case MULT_EXPR:
4330 /* We have a special case here if we are doing something like
4331 (C * 8) % 4 since we know that's zero. */
4332 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4333 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4334 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4335 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4336 return omit_one_operand (type, integer_zero_node, op0);
4337
4338 /* ... fall through ... */
4339
4340 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
4341 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
4342 /* If we can extract our operation from the LHS, do so and return a
4343 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
4344 do something only if the second operand is a constant. */
4345 if (same_p
4346 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4347 return fold (build (tcode, ctype, convert (ctype, t1),
4348 convert (ctype, op1)));
4349 else if (tcode == MULT_EXPR && code == MULT_EXPR
4350 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4351 return fold (build (tcode, ctype, convert (ctype, op0),
4352 convert (ctype, t1)));
4353 else if (TREE_CODE (op1) != INTEGER_CST)
4354 return 0;
4355
4356 /* If these are the same operation types, we can associate them
4357 assuming no overflow. */
4358 if (tcode == code
4359 && 0 != (t1 = const_binop (MULT_EXPR, convert (ctype, op1),
4360 convert (ctype, c), 0))
4361 && ! TREE_OVERFLOW (t1))
4362 return fold (build (tcode, ctype, convert (ctype, op0), t1));
4363
4364 /* If these operations "cancel" each other, we have the main
4365 optimizations of this pass, which occur when either constant is a
4366 multiple of the other, in which case we replace this with an
4367 operation of either CODE or TCODE.
4368
4369 If we have an unsigned type that is not a sizetype, we cannot do
4370 this since it will change the result if the original computation
4371 overflowed. */
4372 if ((! TREE_UNSIGNED (ctype)
4373 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
4374 && ! flag_wrapv
4375 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4376 || (tcode == MULT_EXPR
4377 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4378 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
4379 {
4380 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4381 return fold (build (tcode, ctype, convert (ctype, op0),
4382 convert (ctype,
4383 const_binop (TRUNC_DIV_EXPR,
4384 op1, c, 0))));
4385 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4386 return fold (build (code, ctype, convert (ctype, op0),
4387 convert (ctype,
4388 const_binop (TRUNC_DIV_EXPR,
4389 c, op1, 0))));
4390 }
4391 break;
4392
4393 default:
4394 break;
4395 }
4396
4397 return 0;
4398 }
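
/* Illustrative sketch only, not compiler code: the LSHIFT_EXPR case
   above treats x << 3 as x * 8, so a modulus by a divisor of 8 folds
   to zero through the MULT_EXPR path.  Returns 1 whenever the shift
   is well defined (x non-negative and small enough).  */

static int
example_shift_mod (int x)
{
  return ((x << 3) % 4) == 0;
}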
4399 \f
4400 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4401 S, a SAVE_EXPR, return the expression actually being evaluated. Note
4402 that we may sometimes modify the tree. */
4403
4404 static tree
4405 strip_compound_expr (tree t, tree s)
4406 {
4407 enum tree_code code = TREE_CODE (t);
4408
4409 /* See if this is the COMPOUND_EXPR we want to eliminate. */
4410 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4411 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4412 return TREE_OPERAND (t, 1);
4413
4414 /* See if this is a COND_EXPR or a simple arithmetic operator. We
4415 don't bother handling any other types. */
4416 else if (code == COND_EXPR)
4417 {
4418 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4419 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4420 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
4421 }
4422 else if (TREE_CODE_CLASS (code) == '1')
4423 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4424 else if (TREE_CODE_CLASS (code) == '<'
4425 || TREE_CODE_CLASS (code) == '2')
4426 {
4427 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4428 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4429 }
4430
4431 return t;
4432 }
4433 \f
4434 /* Return a node which has the indicated constant VALUE (either 0 or
4435 1), and is of the indicated TYPE. */
4436
4437 static tree
4438 constant_boolean_node (int value, tree type)
4439 {
4440 if (type == integer_type_node)
4441 return value ? integer_one_node : integer_zero_node;
4442 else if (TREE_CODE (type) == BOOLEAN_TYPE)
4443 return (*lang_hooks.truthvalue_conversion) (value ? integer_one_node :
4444 integer_zero_node);
4445 else
4446 {
4447 tree t = build_int_2 (value, 0);
4448
4449 TREE_TYPE (t) = type;
4450 return t;
4451 }
4452 }
4453
4454 /* Utility function for the following routine, to see how complex a nesting of
4455 COND_EXPRs can be. EXPR is the expression and LIMIT is a count beyond which
4456 we don't care (to avoid spending too much time on complex expressions). */
4457
4458 static int
4459 count_cond (tree expr, int lim)
4460 {
4461 int ctrue, cfalse;
4462
4463 if (TREE_CODE (expr) != COND_EXPR)
4464 return 0;
4465 else if (lim <= 0)
4466 return 0;
4467
4468 ctrue = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4469 cfalse = count_cond (TREE_OPERAND (expr, 2), lim - 1 - ctrue);
4470 return MIN (lim, 1 + ctrue + cfalse);
4471 }
4472
4473 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
4474 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
4475 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
4476 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
4477 COND is the first argument to CODE; otherwise (as in the example
4478 given here), it is the second argument. TYPE is the type of the
4479 original expression. */
4480
4481 static tree
4482 fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
4483 tree cond, tree arg, int cond_first_p)
4484 {
4485 tree test, true_value, false_value;
4486 tree lhs = NULL_TREE;
4487 tree rhs = NULL_TREE;
4488 /* In the end, we'll produce a COND_EXPR. Both arms of the
4489 conditional expression will be binary operations. The left-hand
4490 side of the expression to be executed if the condition is true
4491 will be pointed to by TRUE_LHS. Similarly, the right-hand side
4492 of the expression to be executed if the condition is true will be
4493 pointed to by TRUE_RHS. FALSE_LHS and FALSE_RHS are analogous --
4494 but apply to the expression to be executed if the conditional is
4495 false. */
4496 tree *true_lhs;
4497 tree *true_rhs;
4498 tree *false_lhs;
4499 tree *false_rhs;
4500 /* These are the codes to use for the left-hand side and right-hand
4501 side of the COND_EXPR. Normally, they are the same as CODE. */
4502 enum tree_code lhs_code = code;
4503 enum tree_code rhs_code = code;
4504 /* And these are the types of the expressions. */
4505 tree lhs_type = type;
4506 tree rhs_type = type;
4507 int save = 0;
4508
4509 if (cond_first_p)
4510 {
4511 true_rhs = false_rhs = &arg;
4512 true_lhs = &true_value;
4513 false_lhs = &false_value;
4514 }
4515 else
4516 {
4517 true_lhs = false_lhs = &arg;
4518 true_rhs = &true_value;
4519 false_rhs = &false_value;
4520 }
4521
4522 if (TREE_CODE (cond) == COND_EXPR)
4523 {
4524 test = TREE_OPERAND (cond, 0);
4525 true_value = TREE_OPERAND (cond, 1);
4526 false_value = TREE_OPERAND (cond, 2);
4527 /* If this operand throws an exception, then it does not make
4528 sense to try to perform a logical or arithmetic operation
4529 involving it. Instead of building `a + throw 3' for example,
4530 we simply build `a, throw 3'. */
4531 if (VOID_TYPE_P (TREE_TYPE (true_value)))
4532 {
4533 if (! cond_first_p)
4534 {
4535 lhs_code = COMPOUND_EXPR;
4536 lhs_type = void_type_node;
4537 }
4538 else
4539 lhs = true_value;
4540 }
4541 if (VOID_TYPE_P (TREE_TYPE (false_value)))
4542 {
4543 if (! cond_first_p)
4544 {
4545 rhs_code = COMPOUND_EXPR;
4546 rhs_type = void_type_node;
4547 }
4548 else
4549 rhs = false_value;
4550 }
4551 }
4552 else
4553 {
4554 tree testtype = TREE_TYPE (cond);
4555 test = cond;
4556 true_value = convert (testtype, integer_one_node);
4557 false_value = convert (testtype, integer_zero_node);
4558 }
4559
4560 /* If ARG is complex we want to make sure we only evaluate it once. Though
4561 this is only required if it is volatile, it might be more efficient even
4562 if it is not. However, if we succeed in folding one part to a constant,
4563 we do not need to make this SAVE_EXPR. Since we do this optimization
4564 primarily to see if we do end up with a constant and this SAVE_EXPR
4565 interferes with later optimizations, suppressing it when we can is
4566 important.
4567
4568 If we are not in a function, we can't make a SAVE_EXPR, so don't try to
4569 do so. Don't try to see if the result is a constant if an arm is a
4570 COND_EXPR since we get exponential behavior in that case. */
4571
4572 if (saved_expr_p (arg))
4573 save = 1;
4574 else if (lhs == 0 && rhs == 0
4575 && !TREE_CONSTANT (arg)
4576 && (*lang_hooks.decls.global_bindings_p) () == 0
4577 && ((TREE_CODE (arg) != VAR_DECL && TREE_CODE (arg) != PARM_DECL)
4578 || TREE_SIDE_EFFECTS (arg)))
4579 {
4580 if (TREE_CODE (true_value) != COND_EXPR)
4581 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4582
4583 if (TREE_CODE (false_value) != COND_EXPR)
4584 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4585
4586 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4587 && (rhs == 0 || !TREE_CONSTANT (rhs)))
4588 {
4589 arg = save_expr (arg);
4590 lhs = rhs = 0;
4591 save = 1;
4592 }
4593 }
4594
4595 if (lhs == 0)
4596 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4597 if (rhs == 0)
4598 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4599
4600 test = fold (build (COND_EXPR, type, test, lhs, rhs));
4601
4602 if (save)
4603 return build (COMPOUND_EXPR, type,
4604 convert (void_type_node, arg),
4605 strip_compound_expr (test, arg));
4606 else
4607 return convert (type, test);
4608 }
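
/* Illustrative sketch only, not compiler code: the distribution
   performed above.  It pays off when an arm then folds to a constant:
   1 + (b ? 2 : 3) becomes b ? 3 : 4, leaving no addition at run
   time.  */

static int
example_distribute_cond (int a, int b, int x, int y)
{
  /* a + (b ? x : y) always has the same value as ...  */
  return b ? (a + x) : (a + y);
}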
4609
4610 \f
4611 /* Subroutine of fold() that checks for the addition of +/- 0.0.
4612
4613 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
4614 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
4615 ADDEND is the same as X.
4616
4617 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
4618 and finite. The problematic cases are when X is zero, and its mode
4619 has signed zeros. In the case of rounding towards -infinity,
4620 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
4621 modes, X + 0 is not the same as X because -0 + 0 is 0. */
4622
4623 static bool
4624 fold_real_zero_addition_p (tree type, tree addend, int negate)
4625 {
4626 if (!real_zerop (addend))
4627 return false;
4628
4629 /* Don't allow the fold with -fsignaling-nans. */
4630 if (HONOR_SNANS (TYPE_MODE (type)))
4631 return false;
4632
4633 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
4634 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
4635 return true;
4636
4637 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
4638 if (TREE_CODE (addend) == REAL_CST
4639 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
4640 negate = !negate;
4641
4642 /* The mode has signed zeros, and we have to honor their sign.
4643 In this situation, there is only one case we can return true for.
4644 X - 0 is the same as X unless rounding towards -infinity is
4645 supported. */
4646 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
4647 }
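
/* Illustrative sketch only, not compiler code: why X + 0.0 cannot be
   folded to X when signed zeros are honored.  Under IEEE semantics
   with round-to-nearest, -0.0 + 0.0 is +0.0, so the fold would lose
   the sign of zero.  */

static int
example_signed_zero_lost (void)
{
  double x = -0.0;
  double sum = x + 0.0;    /* yields +0.0, not -0.0 */
  return 1.0 / sum > 0.0;  /* 1: the division exposes the lost sign */
}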
4648
4649 /* Subroutine of fold() that checks comparisons of built-in math
4650 functions against real constants.
4651
4652 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
4653 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
4654 is the type of the result and ARG0 and ARG1 are the operands of the
4655 comparison. ARG1 must be a TREE_REAL_CST.
4656
4657 The function returns the constant folded tree if a simplification
4658 can be made, and NULL_TREE otherwise. */
4659
4660 static tree
4661 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
4662 tree type, tree arg0, tree arg1)
4663 {
4664 REAL_VALUE_TYPE c;
4665
4666 if (fcode == BUILT_IN_SQRT
4667 || fcode == BUILT_IN_SQRTF
4668 || fcode == BUILT_IN_SQRTL)
4669 {
4670 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
4671 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
4672
4673 c = TREE_REAL_CST (arg1);
4674 if (REAL_VALUE_NEGATIVE (c))
4675 {
4676 /* sqrt(x) < y is always false, if y is negative. */
4677 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
4678 return omit_one_operand (type,
4679 convert (type, integer_zero_node),
4680 arg);
4681
4682 /* sqrt(x) > y is always true, if y is negative and we
4683 don't care about NaNs, i.e. negative values of x. */
4684 if (code == NE_EXPR || !HONOR_NANS (mode))
4685 return omit_one_operand (type,
4686 convert (type, integer_one_node),
4687 arg);
4688
4689 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
4690 return fold (build (GE_EXPR, type, arg,
4691 build_real (TREE_TYPE (arg), dconst0)));
4692 }
4693 else if (code == GT_EXPR || code == GE_EXPR)
4694 {
4695 REAL_VALUE_TYPE c2;
4696
4697 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
4698 real_convert (&c2, mode, &c2);
4699
4700 if (REAL_VALUE_ISINF (c2))
4701 {
4702 /* sqrt(x) > y is x == +Inf, when y is very large. */
4703 if (HONOR_INFINITIES (mode))
4704 return fold (build (EQ_EXPR, type, arg,
4705 build_real (TREE_TYPE (arg), c2)));
4706
4707 /* sqrt(x) > y is always false, when y is very large
4708 and we don't care about infinities. */
4709 return omit_one_operand (type,
4710 convert (type, integer_zero_node),
4711 arg);
4712 }
4713
4714 /* sqrt(x) > c is the same as x > c*c. */
4715 return fold (build (code, type, arg,
4716 build_real (TREE_TYPE (arg), c2)));
4717 }
4718 else if (code == LT_EXPR || code == LE_EXPR)
4719 {
4720 REAL_VALUE_TYPE c2;
4721
4722 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
4723 real_convert (&c2, mode, &c2);
4724
4725 if (REAL_VALUE_ISINF (c2))
4726 {
4727 /* sqrt(x) < y is always true, when y is a very large
4728 value and we don't care about NaNs or Infinities. */
4729 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
4730 return omit_one_operand (type,
4731 convert (type, integer_one_node),
4732 arg);
4733
4734 /* sqrt(x) < y is x != +Inf when y is very large and we
4735 don't care about NaNs. */
4736 if (! HONOR_NANS (mode))
4737 return fold (build (NE_EXPR, type, arg,
4738 build_real (TREE_TYPE (arg), c2)));
4739
4740 /* sqrt(x) < y is x >= 0 when y is very large and we
4741 don't care about Infinities. */
4742 if (! HONOR_INFINITIES (mode))
4743 return fold (build (GE_EXPR, type, arg,
4744 build_real (TREE_TYPE (arg), dconst0)));
4745
4746 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
4747 if ((*lang_hooks.decls.global_bindings_p) () != 0
4748 || CONTAINS_PLACEHOLDER_P (arg))
4749 return NULL_TREE;
4750
4751 arg = save_expr (arg);
4752 return fold (build (TRUTH_ANDIF_EXPR, type,
4753 fold (build (GE_EXPR, type, arg,
4754 build_real (TREE_TYPE (arg),
4755 dconst0))),
4756 fold (build (NE_EXPR, type, arg,
4757 build_real (TREE_TYPE (arg),
4758 c2)))));
4759 }
4760
4761 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
4762 if (! HONOR_NANS (mode))
4763 return fold (build (code, type, arg,
4764 build_real (TREE_TYPE (arg), c2)));
4765
4766 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
4767 if ((*lang_hooks.decls.global_bindings_p) () == 0
4768 && ! CONTAINS_PLACEHOLDER_P (arg))
4769 {
4770 arg = save_expr (arg);
4771 return fold (build (TRUTH_ANDIF_EXPR, type,
4772 fold (build (GE_EXPR, type, arg,
4773 build_real (TREE_TYPE (arg),
4774 dconst0))),
4775 fold (build (code, type, arg,
4776 build_real (TREE_TYPE (arg),
4777 c2)))));
4778 }
4779 }
4780 }
4781
4782 return NULL_TREE;
4783 }
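
/* Illustrative sketch only, not compiler code: the core identity used
   above.  For c >= 0 and finite, non-NaN x, sqrt(x) > c holds exactly
   when x > c*c, up to the rounding of c*c that the code above handles
   with real_convert.  A local prototype stands in for <math.h>.  */

static int
example_sqrt_compare (double x, double c)
{
  extern double sqrt (double);
  return (sqrt (x) > c) == (x > c * c);
}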
4784
4785 /* Subroutine of fold() that optimizes comparisons against Infinities,
4786 either +Inf or -Inf.
4787
4788 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
4789 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
4790 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
4791
4792 The function returns the constant folded tree if a simplification
4793 can be made, and NULL_TREE otherwise. */
4794
4795 static tree
4796 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
4797 {
4798 enum machine_mode mode;
4799 REAL_VALUE_TYPE max;
4800 tree temp;
4801 bool neg;
4802
4803 mode = TYPE_MODE (TREE_TYPE (arg0));
4804
4805 /* For negative infinity swap the sense of the comparison. */
4806 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
4807 if (neg)
4808 code = swap_tree_comparison (code);
4809
4810 switch (code)
4811 {
4812 case GT_EXPR:
4813 /* x > +Inf is always false, if we ignore sNaNs. */
4814 if (HONOR_SNANS (mode))
4815 return NULL_TREE;
4816 return omit_one_operand (type,
4817 convert (type, integer_zero_node),
4818 arg0);
4819
4820 case LE_EXPR:
4821 /* x <= +Inf is always true, if we don't care about NaNs. */
4822 if (! HONOR_NANS (mode))
4823 return omit_one_operand (type,
4824 convert (type, integer_one_node),
4825 arg0);
4826
4827 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
4828 if ((*lang_hooks.decls.global_bindings_p) () == 0
4829 && ! CONTAINS_PLACEHOLDER_P (arg0))
4830 {
4831 arg0 = save_expr (arg0);
4832 return fold (build (EQ_EXPR, type, arg0, arg0));
4833 }
4834 break;
4835
4836 case EQ_EXPR:
4837 case GE_EXPR:
4838 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
4839 real_maxval (&max, neg, mode);
4840 return fold (build (neg ? LT_EXPR : GT_EXPR, type,
4841 arg0, build_real (TREE_TYPE (arg0), max)));
4842
4843 case LT_EXPR:
4844 /* x < +Inf is always equal to x <= DBL_MAX. */
4845 real_maxval (&max, neg, mode);
4846 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
4847 arg0, build_real (TREE_TYPE (arg0), max)));
4848
4849 case NE_EXPR:
4850 /* x != +Inf is always equal to !(x > DBL_MAX). */
4851 real_maxval (&max, neg, mode);
4852 if (! HONOR_NANS (mode))
4853 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
4854 arg0, build_real (TREE_TYPE (arg0), max)));
4855 temp = fold (build (neg ? LT_EXPR : GT_EXPR, type,
4856 arg0, build_real (TREE_TYPE (arg0), max)));
4857 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
4858
4859 default:
4860 break;
4861 }
4862
4863 return NULL_TREE;
4864 }
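
/* Illustrative sketch only, not compiler code: with IEEE doubles there
   is no value strictly between DBL_MAX and +Inf, so x < +Inf is the
   same test as x <= DBL_MAX; NaNs fail both sides alike.  */

static int
example_lt_inf (double x)
{
  double inf = 1.0 / 0.0;                   /* +Inf under IEEE rules */
  double dbl_max = 1.7976931348623157e308;  /* DBL_MAX */
  return (x < inf) == (x <= dbl_max);       /* 1 for every x */
}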
4865
4866 /* If CODE with arguments ARG0 and ARG1 represents a single bit
4867 equality/inequality test, then return a simplified form of
4868 the test using shifts and logical operations. Otherwise return
4869 NULL. TYPE is the desired result type. */
4870
4871 tree
4872 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
4873 tree result_type)
4874 {
4875 /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
4876 operand 0. */
4877 if (code == TRUTH_NOT_EXPR)
4878 {
4879 code = TREE_CODE (arg0);
4880 if (code != NE_EXPR && code != EQ_EXPR)
4881 return NULL_TREE;
4882
4883 /* Extract the arguments of the EQ/NE. */
4884 arg1 = TREE_OPERAND (arg0, 1);
4885 arg0 = TREE_OPERAND (arg0, 0);
4886
4887 /* This requires us to invert the code. */
4888 code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
4889 }
4890
4891 /* If this is testing a single bit, we can optimize the test. */
4892 if ((code == NE_EXPR || code == EQ_EXPR)
4893 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
4894 && integer_pow2p (TREE_OPERAND (arg0, 1)))
4895 {
4896 tree inner = TREE_OPERAND (arg0, 0);
4897 tree type = TREE_TYPE (arg0);
4898 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
4899 enum machine_mode operand_mode = TYPE_MODE (type);
4900 int ops_unsigned;
4901 tree signed_type, unsigned_type;
4902 tree arg00;
4903
4904 /* If we have (A & C) != 0 where C is the sign bit of A, convert
4905 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
4906 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
4907 if (arg00 != NULL_TREE)
4908 {
4909 tree stype = (*lang_hooks.types.signed_type) (TREE_TYPE (arg00));
4910 return fold (build (code == EQ_EXPR ? GE_EXPR : LT_EXPR, result_type,
4911 convert (stype, arg00),
4912 convert (stype, integer_zero_node)));
4913 }
4914
4915 /* At this point, we know that arg0 is not testing the sign bit. */
4916 if (TYPE_PRECISION (type) - 1 == bitnum)
4917 abort ();
4918
4919 /* Otherwise we have (A & C) != 0 where C is a single bit,
4920 convert that into ((A >> C2) & 1), where C2 = log2(C).
4921 Similarly for (A & C) == 0. */
4922
4923 /* If INNER is a right shift of a constant and it plus BITNUM does
4924 not overflow, adjust BITNUM and INNER. */
4925 if (TREE_CODE (inner) == RSHIFT_EXPR
4926 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
4927 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
4928 && bitnum < TYPE_PRECISION (type)
4929 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
4930 bitnum - TYPE_PRECISION (type)))
4931 {
4932 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
4933 inner = TREE_OPERAND (inner, 0);
4934 }
4935
4936 /* If we are going to be able to omit the AND below, we must do our
4937 operations as unsigned. If we must use the AND, we have a choice.
4938 Normally unsigned is faster, but for some machines signed is. */
4939 #ifdef LOAD_EXTEND_OP
4940 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
4941 #else
4942 ops_unsigned = 1;
4943 #endif
4944
4945 signed_type = (*lang_hooks.types.type_for_mode) (operand_mode, 0);
4946 unsigned_type = (*lang_hooks.types.type_for_mode) (operand_mode, 1);
4947
4948 if (bitnum != 0)
4949 inner = build (RSHIFT_EXPR, ops_unsigned ? unsigned_type : signed_type,
4950 inner, size_int (bitnum));
4951
4952 if (code == EQ_EXPR)
4953 inner = build (BIT_XOR_EXPR, ops_unsigned ? unsigned_type : signed_type,
4954 inner, integer_one_node);
4955
4956 /* Put the AND last so it can combine with more things. */
4957 inner = build (BIT_AND_EXPR, ops_unsigned ? unsigned_type : signed_type,
4958 inner, integer_one_node);
4959
4960 /* Make sure to return the proper type. */
4961 if (TREE_TYPE (inner) != result_type)
4962 inner = convert (result_type, inner);
4963
4964 return inner;
4965 }
4966 return NULL_TREE;
4967 }
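
/* Illustrative sketch only, not compiler code: the two rewrites above.
   For C == 1 << B, (a & C) != 0 becomes a shift and a mask; when C is
   the sign bit, the whole test is just a signed comparison with
   zero.  */

static int
example_single_bit_test (unsigned int a, int b)
{
  return (a >> b) & 1u;  /* same value as (a & (1u << b)) != 0 */
}

static int
example_sign_bit_test (int a)
{
  return a < 0;  /* same value as (a & sign-bit mask) != 0 */
}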
4968
4969 /* Perform constant folding and related simplification of EXPR.
4970 The related simplifications include x*1 => x, x*0 => 0, etc.,
4971 and application of the associative law.
4972 NOP_EXPR conversions may be removed freely (as long as we
4973 are careful not to change the C type of the overall expression).
4974 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
4975 but we can constant-fold them if they have constant operands. */
4976
4977 #ifdef ENABLE_FOLD_CHECKING
4978 # define fold(x) fold_1 (x)
4979 static tree fold_1 (tree);
4980 static
4981 #endif
4982 tree
4983 fold (tree expr)
4984 {
4985 tree t = expr, orig_t;
4986 tree t1 = NULL_TREE;
4987 tree tem;
4988 tree type = TREE_TYPE (expr);
4989 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4990 enum tree_code code = TREE_CODE (t);
4991 int kind = TREE_CODE_CLASS (code);
4992 int invert;
4993 /* WINS will be nonzero when the switch is done
4994 if all operands are constant. */
4995 int wins = 1;
4996
4997 /* Don't try to process an RTL_EXPR since its operands aren't trees.
4998 Likewise for a SAVE_EXPR that's already been evaluated. */
4999 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
5000 return t;
5001
5002 /* Return right away if a constant. */
5003 if (kind == 'c')
5004 return t;
5005
5006 #ifdef MAX_INTEGER_COMPUTATION_MODE
5007 check_max_integer_computation_mode (expr);
5008 #endif
5009 orig_t = t;
5010
5011 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
5012 {
5013 tree subop;
5014
5015 /* Special case for conversion ops that can have fixed point args. */
5016 arg0 = TREE_OPERAND (t, 0);
5017
5018 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
5019 if (arg0 != 0)
5020 STRIP_SIGN_NOPS (arg0);
5021
5022 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
5023 subop = TREE_REALPART (arg0);
5024 else
5025 subop = arg0;
5026
5027 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
5028 && TREE_CODE (subop) != REAL_CST)
5030 /* Note that TREE_CONSTANT isn't enough:
5031 static var addresses are constant but we can't
5032 do arithmetic on them. */
5033 wins = 0;
5034 }
5035 else if (IS_EXPR_CODE_CLASS (kind) || kind == 'r')
5036 {
5037 int len = first_rtl_op (code);
5038 int i;
5039 for (i = 0; i < len; i++)
5040 {
5041 tree op = TREE_OPERAND (t, i);
5042 tree subop;
5043
5044 if (op == 0)
5045 continue; /* Valid for CALL_EXPR, at least. */
5046
5047 if (kind == '<' || code == RSHIFT_EXPR)
5048 {
5049 /* Signedness matters here. Perhaps we can refine this
5050 later. */
5051 STRIP_SIGN_NOPS (op);
5052 }
5053 else
5054 /* Strip any conversions that don't change the mode. */
5055 STRIP_NOPS (op);
5056
5057 if (TREE_CODE (op) == COMPLEX_CST)
5058 subop = TREE_REALPART (op);
5059 else
5060 subop = op;
5061
5062 if (TREE_CODE (subop) != INTEGER_CST
5063 && TREE_CODE (subop) != REAL_CST)
5064 /* Note that TREE_CONSTANT isn't enough:
5065 static var addresses are constant but we can't
5066 do arithmetic on them. */
5067 wins = 0;
5068
5069 if (i == 0)
5070 arg0 = op;
5071 else if (i == 1)
5072 arg1 = op;
5073 }
5074 }
5075
5076 /* If this is a commutative operation, and ARG0 is a constant, move it
5077 to ARG1 to reduce the number of tests below. */
5078 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
5079 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
5080 || code == BIT_AND_EXPR)
5081 && ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) != INTEGER_CST)
5082 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) != REAL_CST)))
5083 {
5084 tem = arg0; arg0 = arg1; arg1 = tem;
5085
5086 if (t == orig_t)
5087 t = copy_node (t);
5088 TREE_OPERAND (t, 0) = arg0;
5089 TREE_OPERAND (t, 1) = arg1;
5090 }
5091
5092 /* Now WINS is set as described above,
5093 ARG0 is the first operand of EXPR,
5094 and ARG1 is the second operand (if it has more than one operand).
5095
5096 First check for cases where an arithmetic operation is applied to a
5097 compound, conditional, or comparison operation. Push the arithmetic
5098 operation inside the compound or conditional to see if any folding
5099 can then be done. Convert comparison to conditional for this purpose.
5100 This also optimizes non-constant cases that used to be done in
5101 expand_expr.
5102
5103 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
5104 one of the operands is a comparison and the other is a comparison, a
5105 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
5106 code below would make the expression more complex. Change it to a
5107 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
5108 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
5109
5110 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
5111 || code == EQ_EXPR || code == NE_EXPR)
5112 && ((truth_value_p (TREE_CODE (arg0))
5113 && (truth_value_p (TREE_CODE (arg1))
5114 || (TREE_CODE (arg1) == BIT_AND_EXPR
5115 && integer_onep (TREE_OPERAND (arg1, 1)))))
5116 || (truth_value_p (TREE_CODE (arg1))
5117 && (truth_value_p (TREE_CODE (arg0))
5118 || (TREE_CODE (arg0) == BIT_AND_EXPR
5119 && integer_onep (TREE_OPERAND (arg0, 1)))))))
5120 {
5121 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
5122 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
5123 : TRUTH_XOR_EXPR,
5124 type, arg0, arg1));
5125
5126 if (code == EQ_EXPR)
5127 t = invert_truthvalue (t);
5128
5129 return t;
5130 }
5131
5132 if (TREE_CODE_CLASS (code) == '1')
5133 {
5134 if (TREE_CODE (arg0) == COMPOUND_EXPR)
5135 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5136 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
5137 else if (TREE_CODE (arg0) == COND_EXPR)
5138 {
5139 tree arg01 = TREE_OPERAND (arg0, 1);
5140 tree arg02 = TREE_OPERAND (arg0, 2);
5141 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
5142 arg01 = fold (build1 (code, type, arg01));
5143 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
5144 arg02 = fold (build1 (code, type, arg02));
5145 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
5146 arg01, arg02));
5147
5148 /* If this was a conversion, and all we did was to move it
5149 inside the COND_EXPR, bring it back out. But leave it if
5150 it is a conversion from integer to integer and the
5151 result precision is no wider than a word since such a
5152 conversion is cheap and may be optimized away by combine,
5153 while it couldn't if it were outside the COND_EXPR. Then return
5154 so we don't get into an infinite recursion loop taking the
5155 conversion out and then back in. */
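 /* A sketch of the transformation above (hypothetical example):
 (T) (cond ? a : b) first becomes cond ? (T) a : (T) b; a cheap
 integer-to-integer conversion no wider than a word is then left
 inside, anything else is hoisted back out. */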
5156
5157 if ((code == NOP_EXPR || code == CONVERT_EXPR
5158 || code == NON_LVALUE_EXPR)
5159 && TREE_CODE (t) == COND_EXPR
5160 && TREE_CODE (TREE_OPERAND (t, 1)) == code
5161 && TREE_CODE (TREE_OPERAND (t, 2)) == code
5162 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1)))
5163 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 2)))
5164 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
5165 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
5166 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
5167 && (INTEGRAL_TYPE_P
5168 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))))
5169 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
5170 t = build1 (code, type,
5171 build (COND_EXPR,
5172 TREE_TYPE (TREE_OPERAND
5173 (TREE_OPERAND (t, 1), 0)),
5174 TREE_OPERAND (t, 0),
5175 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
5176 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
5177 return t;
5178 }
5179 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
5180 return fold (build (COND_EXPR, type, arg0,
5181 fold (build1 (code, type, integer_one_node)),
5182 fold (build1 (code, type, integer_zero_node))));
5183 }
5184 else if (TREE_CODE_CLASS (code) == '<'
5185 && TREE_CODE (arg0) == COMPOUND_EXPR)
5186 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5187 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5188 else if (TREE_CODE_CLASS (code) == '<'
5189 && TREE_CODE (arg1) == COMPOUND_EXPR)
5190 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5191 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
5192 else if (TREE_CODE_CLASS (code) == '2'
5193 || TREE_CODE_CLASS (code) == '<')
5194 {
5195 if (TREE_CODE (arg1) == COMPOUND_EXPR
5196 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg1, 0))
5197 && ! TREE_SIDE_EFFECTS (arg0))
5198 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5199 fold (build (code, type,
5200 arg0, TREE_OPERAND (arg1, 1))));
5201 else if ((TREE_CODE (arg1) == COND_EXPR
5202 || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
5203 && TREE_CODE_CLASS (code) != '<'))
5204 && (TREE_CODE (arg0) != COND_EXPR
5205 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5206 && (! TREE_SIDE_EFFECTS (arg0)
5207 || ((*lang_hooks.decls.global_bindings_p) () == 0
5208 && ! CONTAINS_PLACEHOLDER_P (arg0))))
5209 return
5210 fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
5211 /*cond_first_p=*/0);
5212 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
5213 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5214 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5215 else if ((TREE_CODE (arg0) == COND_EXPR
5216 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
5217 && TREE_CODE_CLASS (code) != '<'))
5218 && (TREE_CODE (arg1) != COND_EXPR
5219 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5220 && (! TREE_SIDE_EFFECTS (arg1)
5221 || ((*lang_hooks.decls.global_bindings_p) () == 0
5222 && ! CONTAINS_PLACEHOLDER_P (arg1))))
5223 return
5224 fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
5225 /*cond_first_p=*/1);
5226 }
5227
5228 switch (code)
5229 {
5230 case INTEGER_CST:
5231 case REAL_CST:
5232 case VECTOR_CST:
5233 case STRING_CST:
5234 case COMPLEX_CST:
5235 case CONSTRUCTOR:
5236 return t;
5237
5238 case CONST_DECL:
5239 return fold (DECL_INITIAL (t));
5240
5241 case NOP_EXPR:
5242 case FLOAT_EXPR:
5243 case CONVERT_EXPR:
5244 case FIX_TRUNC_EXPR:
5245 /* Other kinds of FIX are not handled properly by fold_convert. */
5246
5247 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
5248 return TREE_OPERAND (t, 0);
5249
5250 /* Handle cases of two conversions in a row. */
5251 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
5252 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
5253 {
5254 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5255 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
5256 tree final_type = TREE_TYPE (t);
5257 int inside_int = INTEGRAL_TYPE_P (inside_type);
5258 int inside_ptr = POINTER_TYPE_P (inside_type);
5259 int inside_float = FLOAT_TYPE_P (inside_type);
5260 unsigned int inside_prec = TYPE_PRECISION (inside_type);
5261 int inside_unsignedp = TREE_UNSIGNED (inside_type);
5262 int inter_int = INTEGRAL_TYPE_P (inter_type);
5263 int inter_ptr = POINTER_TYPE_P (inter_type);
5264 int inter_float = FLOAT_TYPE_P (inter_type);
5265 unsigned int inter_prec = TYPE_PRECISION (inter_type);
5266 int inter_unsignedp = TREE_UNSIGNED (inter_type);
5267 int final_int = INTEGRAL_TYPE_P (final_type);
5268 int final_ptr = POINTER_TYPE_P (final_type);
5269 int final_float = FLOAT_TYPE_P (final_type);
5270 unsigned int final_prec = TYPE_PRECISION (final_type);
5271 int final_unsignedp = TREE_UNSIGNED (final_type);
5272
5273 /* In addition to the cases of two conversions in a row
5274 handled below, if we are converting something to its own
5275 type via an object of identical or wider precision, neither
5276 conversion is needed. */
5277 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (final_type)
5278 && ((inter_int && final_int) || (inter_float && final_float))
5279 && inter_prec >= final_prec)
5280 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5281
5282 /* Likewise, if the intermediate and final types are either both
5283 float or both integer, we don't need the middle conversion if it
5284 is at least as wide as the initial type and doesn't change the signedness
5285 (for integers). Avoid this if the final type is a pointer
5286 since then we sometimes need the inner conversion. Likewise if
5287 the outer has a precision not equal to the size of its mode. */
5288 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
5289 || (inter_float && inside_float))
5290 && inter_prec >= inside_prec
5291 && (inter_float || inter_unsignedp == inside_unsignedp)
5292 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5293 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5294 && ! final_ptr)
5295 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5296
5297 /* If we have a sign-extension of a zero-extended value, we can
5298 replace that by a single zero-extension. */
5299 if (inside_int && inter_int && final_int
5300 && inside_prec < inter_prec && inter_prec < final_prec
5301 && inside_unsignedp && !inter_unsignedp)
5302 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5303
5304 /* Two conversions in a row are not needed unless:
5305 - some conversion is floating-point (overstrict for now), or
5306 - the intermediate type is narrower than both initial and
5307 final, or
5308 - the intermediate type and innermost type differ in signedness,
5309 and the outermost type is wider than the intermediate, or
5310 - the initial type is a pointer type and the precisions of the
5311 intermediate and final types differ, or
5312 - the final type is a pointer type and the precisions of the
5313 initial and intermediate types differ. */
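 /* Worked example (hypothetical): for "short s", the intermediate
 widening in (short) (long) s is useless, so the pair collapses to
 a single conversion of s to short; none of the exceptions above
 applies since all three types are integral and the intermediate
 is wider than both ends. */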
5314 if (! inside_float && ! inter_float && ! final_float
5315 && (inter_prec > inside_prec || inter_prec > final_prec)
5316 && ! (inside_int && inter_int
5317 && inter_unsignedp != inside_unsignedp
5318 && inter_prec < final_prec)
5319 && ((inter_unsignedp && inter_prec > inside_prec)
5320 == (final_unsignedp && final_prec > inter_prec))
5321 && ! (inside_ptr && inter_prec != final_prec)
5322 && ! (final_ptr && inside_prec != inter_prec)
5323 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5324 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5325 && ! final_ptr)
5326 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5327 }
5328
5329 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
5330 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
5331 /* Detect assigning a bitfield. */
5332 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
5333 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
5334 {
5335 /* Don't leave an assignment inside a conversion
5336 unless assigning a bitfield. */
5337 tree prev = TREE_OPERAND (t, 0);
5338 if (t == orig_t)
5339 t = copy_node (t);
5340 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
5341 /* First do the assignment, then return converted constant. */
5342 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
5343 TREE_USED (t) = 1;
5344 return t;
5345 }
5346
5347 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
5348 constant (if x has signed type, the sign bit cannot be set
5349 in c). This folds extension into the BIT_AND_EXPR. */
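 /* Hypothetical example of the fold below: with "unsigned char c",
 (int) (c & 0x3f) becomes (int) c & 0x3f, so the widening happens
 once and the mask is applied in the wider type. */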
5350 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
5351 && TREE_CODE (TREE_TYPE (t)) != BOOLEAN_TYPE
5352 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
5353 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
5354 {
5355 tree and = TREE_OPERAND (t, 0);
5356 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
5357 int change = 0;
5358
5359 if (TREE_UNSIGNED (TREE_TYPE (and))
5360 || (TYPE_PRECISION (TREE_TYPE (t))
5361 <= TYPE_PRECISION (TREE_TYPE (and))))
5362 change = 1;
5363 else if (TYPE_PRECISION (TREE_TYPE (and1))
5364 <= HOST_BITS_PER_WIDE_INT
5365 && host_integerp (and1, 1))
5366 {
5367 unsigned HOST_WIDE_INT cst;
5368
5369 cst = tree_low_cst (and1, 1);
5370 cst &= (HOST_WIDE_INT) -1
5371 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
5372 change = (cst == 0);
5373 #ifdef LOAD_EXTEND_OP
5374 if (change
5375 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
5376 == ZERO_EXTEND))
5377 {
5378 tree uns = (*lang_hooks.types.unsigned_type) (TREE_TYPE (and0));
5379 and0 = convert (uns, and0);
5380 and1 = convert (uns, and1);
5381 }
5382 #endif
5383 }
5384 if (change)
5385 return fold (build (BIT_AND_EXPR, TREE_TYPE (t),
5386 convert (TREE_TYPE (t), and0),
5387 convert (TREE_TYPE (t), and1)));
5388 }
5389
5390 if (!wins)
5391 {
5392 if (TREE_CONSTANT (t) != TREE_CONSTANT (arg0))
5393 {
5394 if (t == orig_t)
5395 t = copy_node (t);
5396 TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
5397 }
5398 return t;
5399 }
5400 return fold_convert (t, arg0);
5401
5402 case VIEW_CONVERT_EXPR:
5403 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
5404 return build1 (VIEW_CONVERT_EXPR, type,
5405 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5406 return t;
5407
5408 case COMPONENT_REF:
5409 if (TREE_CODE (arg0) == CONSTRUCTOR
5410 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
5411 {
5412 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
5413 if (m)
5414 t = TREE_VALUE (m);
5415 }
5416 return t;
5417
5418 case RANGE_EXPR:
5419 if (TREE_CONSTANT (t) != wins)
5420 {
5421 if (t == orig_t)
5422 t = copy_node (t);
5423 TREE_CONSTANT (t) = wins;
5424 }
5425 return t;
5426
5427 case NEGATE_EXPR:
5428 if (wins)
5429 {
5430 if (TREE_CODE (arg0) == INTEGER_CST)
5431 {
5432 unsigned HOST_WIDE_INT low;
5433 HOST_WIDE_INT high;
5434 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5435 TREE_INT_CST_HIGH (arg0),
5436 &low, &high);
5437 t = build_int_2 (low, high);
5438 TREE_TYPE (t) = type;
5439 TREE_OVERFLOW (t)
5440 = (TREE_OVERFLOW (arg0)
5441 | force_fit_type (t, overflow && !TREE_UNSIGNED (type)));
5442 TREE_CONSTANT_OVERFLOW (t)
5443 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5444 }
5445 else if (TREE_CODE (arg0) == REAL_CST)
5446 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5447 }
5448 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5449 return TREE_OPERAND (arg0, 0);
5450 /* Convert -((double)float) into (double)(-float). */
5451 else if (TREE_CODE (arg0) == NOP_EXPR
5452 && TREE_CODE (type) == REAL_TYPE)
5453 {
5454 tree targ0 = strip_float_extensions (arg0);
5455 if (targ0 != arg0)
5456 return convert (type, build1 (NEGATE_EXPR, TREE_TYPE (targ0), targ0));
5458 }
5459
5460 /* Convert - (a - b) to (b - a) for non-floating-point, or with -funsafe-math-optimizations. */
5461 else if (TREE_CODE (arg0) == MINUS_EXPR
5462 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
5463 return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
5464 TREE_OPERAND (arg0, 0));
5465
5466 /* Convert -f(x) into f(-x) where f is sin, tan or atan. */
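 /* E.g. (hypothetical): -sin (x) folds to sin (-x), saving the explicit
 negation; this is safe because sin, tan and atan are all odd
 functions. */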
5467 switch (builtin_mathfn_code (arg0))
5468 {
5469 case BUILT_IN_SIN:
5470 case BUILT_IN_SINF:
5471 case BUILT_IN_SINL:
5472 case BUILT_IN_TAN:
5473 case BUILT_IN_TANF:
5474 case BUILT_IN_TANL:
5475 case BUILT_IN_ATAN:
5476 case BUILT_IN_ATANF:
5477 case BUILT_IN_ATANL:
5478 if (negate_expr_p (TREE_VALUE (TREE_OPERAND (arg0, 1))))
5479 {
5480 tree fndecl, arg, arglist;
5481
5482 fndecl = get_callee_fndecl (arg0);
5483 arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5484 arg = fold (build1 (NEGATE_EXPR, type, arg));
5485 arglist = build_tree_list (NULL_TREE, arg);
5486 return build_function_call_expr (fndecl, arglist);
5487 }
5488 break;
5489
5490 default:
5491 break;
5492 }
5493 return t;
5494
5495 case ABS_EXPR:
5496 if (wins)
5497 {
5498 if (TREE_CODE (arg0) == INTEGER_CST)
5499 {
5500 /* If the value is unsigned, then the absolute value is
5501 the same as the ordinary value. */
5502 if (TREE_UNSIGNED (type))
5503 return arg0;
5504 /* Similarly, if the value is non-negative. */
5505 else if (INT_CST_LT (integer_minus_one_node, arg0))
5506 return arg0;
5507 /* If the value is negative, then the absolute value is
5508 its negation. */
5509 else
5510 {
5511 unsigned HOST_WIDE_INT low;
5512 HOST_WIDE_INT high;
5513 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5514 TREE_INT_CST_HIGH (arg0),
5515 &low, &high);
5516 t = build_int_2 (low, high);
5517 TREE_TYPE (t) = type;
5518 TREE_OVERFLOW (t)
5519 = (TREE_OVERFLOW (arg0)
5520 | force_fit_type (t, overflow));
5521 TREE_CONSTANT_OVERFLOW (t)
5522 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5523 }
5524 }
5525 else if (TREE_CODE (arg0) == REAL_CST)
5526 {
5527 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
5528 t = build_real (type,
5529 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5530 }
5531 }
5532 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5533 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
5534 /* Convert fabs((double)float) into (double)fabsf(float). */
5535 else if (TREE_CODE (arg0) == NOP_EXPR
5536 && TREE_CODE (type) == REAL_TYPE)
5537 {
5538 tree targ0 = strip_float_extensions (arg0);
5539 if (targ0 != arg0)
5540 return convert (type, fold (build1 (ABS_EXPR, TREE_TYPE (targ0),
5541 targ0)));
5542 }
5543 else if (tree_expr_nonnegative_p (arg0))
5544 return arg0;
5545 return t;
5546
5547 case CONJ_EXPR:
5548 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5549 return convert (type, arg0);
5550 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5551 return build (COMPLEX_EXPR, type,
5552 TREE_OPERAND (arg0, 0),
5553 negate_expr (TREE_OPERAND (arg0, 1)));
5554 else if (TREE_CODE (arg0) == COMPLEX_CST)
5555 return build_complex (type, TREE_REALPART (arg0),
5556 negate_expr (TREE_IMAGPART (arg0)));
5557 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5558 return fold (build (TREE_CODE (arg0), type,
5559 fold (build1 (CONJ_EXPR, type,
5560 TREE_OPERAND (arg0, 0))),
5561 fold (build1 (CONJ_EXPR,
5562 type, TREE_OPERAND (arg0, 1)))));
5563 else if (TREE_CODE (arg0) == CONJ_EXPR)
5564 return TREE_OPERAND (arg0, 0);
5565 return t;
5566
5567 case BIT_NOT_EXPR:
5568 if (wins)
5569 {
5570 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
5571 ~ TREE_INT_CST_HIGH (arg0));
5572 TREE_TYPE (t) = type;
5573 force_fit_type (t, 0);
5574 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
5575 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
5576 }
5577 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
5578 return TREE_OPERAND (arg0, 0);
5579 return t;
5580
5581 case PLUS_EXPR:
5582 /* A + (-B) -> A - B */
5583 if (TREE_CODE (arg1) == NEGATE_EXPR)
5584 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5585 /* (-A) + B -> B - A */
5586 if (TREE_CODE (arg0) == NEGATE_EXPR)
5587 return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
5588 else if (! FLOAT_TYPE_P (type))
5589 {
5590 if (integer_zerop (arg1))
5591 return non_lvalue (convert (type, arg0));
5592
5593 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
5594 with a constant, and the two constants have no bits in common,
5595 we should treat this as a BIT_IOR_EXPR since this may produce more
5596 simplifications. */
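 /* Hypothetical example: (x & 0xf0) + (y & 0x0f) has no carries between
 the two masked halves, so it is treated as (x & 0xf0) | (y & 0x0f)
 below. */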
5597 if (TREE_CODE (arg0) == BIT_AND_EXPR
5598 && TREE_CODE (arg1) == BIT_AND_EXPR
5599 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5600 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5601 && integer_zerop (const_binop (BIT_AND_EXPR,
5602 TREE_OPERAND (arg0, 1),
5603 TREE_OPERAND (arg1, 1), 0)))
5604 {
5605 code = BIT_IOR_EXPR;
5606 goto bit_ior;
5607 }
5608
5609 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
5610 (plus (plus (mult) (mult)) (foo)) so that we can
5611 take advantage of the factoring cases below. */
5612 if ((TREE_CODE (arg0) == PLUS_EXPR
5613 && TREE_CODE (arg1) == MULT_EXPR)
5614 || (TREE_CODE (arg1) == PLUS_EXPR
5615 && TREE_CODE (arg0) == MULT_EXPR))
5616 {
5617 tree parg0, parg1, parg, marg;
5618
5619 if (TREE_CODE (arg0) == PLUS_EXPR)
5620 parg = arg0, marg = arg1;
5621 else
5622 parg = arg1, marg = arg0;
5623 parg0 = TREE_OPERAND (parg, 0);
5624 parg1 = TREE_OPERAND (parg, 1);
5625 STRIP_NOPS (parg0);
5626 STRIP_NOPS (parg1);
5627
5628 if (TREE_CODE (parg0) == MULT_EXPR
5629 && TREE_CODE (parg1) != MULT_EXPR)
5630 return fold (build (PLUS_EXPR, type,
5631 fold (build (PLUS_EXPR, type,
5632 convert (type, parg0),
5633 convert (type, marg))),
5634 convert (type, parg1)));
5635 if (TREE_CODE (parg0) != MULT_EXPR
5636 && TREE_CODE (parg1) == MULT_EXPR)
5637 return fold (build (PLUS_EXPR, type,
5638 fold (build (PLUS_EXPR, type,
5639 convert (type, parg1),
5640 convert (type, marg))),
5641 convert (type, parg0)));
5642 }
5643
5644 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
5645 {
5646 tree arg00, arg01, arg10, arg11;
5647 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
5648
5649 /* (A * C) + (B * C) -> (A+B) * C.
5650 We are most concerned about the case where C is a constant,
5651 but other combinations show up during loop reduction. Since
5652 it is not difficult, try all four possibilities. */
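 /* For instance (hypothetical): i*4 + j*4 becomes (i + j) * 4, and
 4*i + j*4 is caught as well since all four operand pairings are
 tried. */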
5653
5654 arg00 = TREE_OPERAND (arg0, 0);
5655 arg01 = TREE_OPERAND (arg0, 1);
5656 arg10 = TREE_OPERAND (arg1, 0);
5657 arg11 = TREE_OPERAND (arg1, 1);
5658 same = NULL_TREE;
5659
5660 if (operand_equal_p (arg01, arg11, 0))
5661 same = arg01, alt0 = arg00, alt1 = arg10;
5662 else if (operand_equal_p (arg00, arg10, 0))
5663 same = arg00, alt0 = arg01, alt1 = arg11;
5664 else if (operand_equal_p (arg00, arg11, 0))
5665 same = arg00, alt0 = arg01, alt1 = arg10;
5666 else if (operand_equal_p (arg01, arg10, 0))
5667 same = arg01, alt0 = arg00, alt1 = arg11;
5668
5669 /* No identical multiplicands; see if we can find a common
5670 power-of-two factor in non-power-of-two multiplies. This
5671 can help in multi-dimensional array access. */
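 /* Hypothetical example: i*12 + j*4 shares the power-of-two factor 4,
 so it becomes (i*3 + j) * 4; this shape is common in row/column
 address arithmetic. */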
5672 else if (TREE_CODE (arg01) == INTEGER_CST
5673 && TREE_CODE (arg11) == INTEGER_CST
5674 && TREE_INT_CST_HIGH (arg01) == 0
5675 && TREE_INT_CST_HIGH (arg11) == 0)
5676 {
5677 HOST_WIDE_INT int01, int11, tmp;
5678 int01 = TREE_INT_CST_LOW (arg01);
5679 int11 = TREE_INT_CST_LOW (arg11);
5680
5681 /* Move min of absolute values to int11. */
5682 if ((int01 >= 0 ? int01 : -int01)
5683 < (int11 >= 0 ? int11 : -int11))
5684 {
5685 tmp = int01, int01 = int11, int11 = tmp;
5686 alt0 = arg00, arg00 = arg10, arg10 = alt0;
5687 alt0 = arg01, arg01 = arg11, arg11 = alt0;
5688 }
5689
5690 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
5691 {
5692 alt0 = fold (build (MULT_EXPR, type, arg00,
5693 build_int_2 (int01 / int11, 0)));
5694 alt1 = arg10;
5695 same = arg11;
5696 }
5697 }
5698
5699 if (same)
5700 return fold (build (MULT_EXPR, type,
5701 fold (build (PLUS_EXPR, type, alt0, alt1)),
5702 same));
5703 }
5704 }
5705 else
5706 {
5707 /* See if ARG1 is zero and X + ARG1 reduces to X. */
5708 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
5709 return non_lvalue (convert (type, arg0));
5710
5711 /* Likewise if the operands are reversed. */
5712 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
5713 return non_lvalue (convert (type, arg1));
5714
5715 /* Convert x+x into x*2.0. */
5716 if (operand_equal_p (arg0, arg1, 0))
5717 return fold (build (MULT_EXPR, type, arg0,
5718 build_real (type, dconst2)));
5719
5720 /* Convert x*c+x into x*(c+1). */
5721 if (flag_unsafe_math_optimizations
5722 && TREE_CODE (arg0) == MULT_EXPR
5723 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
5724 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
5725 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
5726 {
5727 REAL_VALUE_TYPE c;
5728
5729 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
5730 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
5731 return fold (build (MULT_EXPR, type, arg1,
5732 build_real (type, c)));
5733 }
5734
5735 /* Convert x+x*c into x*(c+1). */
5736 if (flag_unsafe_math_optimizations
5737 && TREE_CODE (arg1) == MULT_EXPR
5738 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
5739 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
5740 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
5741 {
5742 REAL_VALUE_TYPE c;
5743
5744 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
5745 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
5746 return fold (build (MULT_EXPR, type, arg0,
5747 build_real (type, c)));
5748 }
5749
5750 /* Convert x*c1+x*c2 into x*(c1+c2). */
5751 if (flag_unsafe_math_optimizations
5752 && TREE_CODE (arg0) == MULT_EXPR
5753 && TREE_CODE (arg1) == MULT_EXPR
5754 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
5755 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
5756 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
5757 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
5758 && operand_equal_p (TREE_OPERAND (arg0, 0),
5759 TREE_OPERAND (arg1, 0), 0))
5760 {
5761 REAL_VALUE_TYPE c1, c2;
5762
5763 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
5764 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
5765 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
5766 return fold (build (MULT_EXPR, type,
5767 TREE_OPERAND (arg0, 0),
5768 build_real (type, c1)));
5769 }
5770 }
5771
5772 bit_rotate:
5773 /* (A << C1) + (A >> C2), where A is unsigned and C1+C2 is the size of A,
5774 is a rotate of A by C1 bits. */
5775 /* (A << B) + (A >> (Z - B)), where A is unsigned and Z is the size of A,
5776 is a rotate of A by B bits. */
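 /* Hypothetical example: for a 32-bit unsigned x, (x << 3) + (x >> 29)
 matches the first form and becomes a rotate left by 3, while
 (x << n) + (x >> (32 - n)) matches the second. */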
5777 {
5778 enum tree_code code0, code1;
5779 code0 = TREE_CODE (arg0);
5780 code1 = TREE_CODE (arg1);
5781 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
5782 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
5783 && operand_equal_p (TREE_OPERAND (arg0, 0),
5784 TREE_OPERAND (arg1, 0), 0)
5785 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5786 {
5787 tree tree01, tree11;
5788 enum tree_code code01, code11;
5789
5790 tree01 = TREE_OPERAND (arg0, 1);
5791 tree11 = TREE_OPERAND (arg1, 1);
5792 STRIP_NOPS (tree01);
5793 STRIP_NOPS (tree11);
5794 code01 = TREE_CODE (tree01);
5795 code11 = TREE_CODE (tree11);
5796 if (code01 == INTEGER_CST
5797 && code11 == INTEGER_CST
5798 && TREE_INT_CST_HIGH (tree01) == 0
5799 && TREE_INT_CST_HIGH (tree11) == 0
5800 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
5801 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
5802 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
5803 code0 == LSHIFT_EXPR ? tree01 : tree11);
5804 else if (code11 == MINUS_EXPR)
5805 {
5806 tree tree110, tree111;
5807 tree110 = TREE_OPERAND (tree11, 0);
5808 tree111 = TREE_OPERAND (tree11, 1);
5809 STRIP_NOPS (tree110);
5810 STRIP_NOPS (tree111);
5811 if (TREE_CODE (tree110) == INTEGER_CST
5812 && 0 == compare_tree_int (tree110,
5813 TYPE_PRECISION
5814 (TREE_TYPE (TREE_OPERAND
5815 (arg0, 0))))
5816 && operand_equal_p (tree01, tree111, 0))
5817 return build ((code0 == LSHIFT_EXPR
5818 ? LROTATE_EXPR
5819 : RROTATE_EXPR),
5820 type, TREE_OPERAND (arg0, 0), tree01);
5821 }
5822 else if (code01 == MINUS_EXPR)
5823 {
5824 tree tree010, tree011;
5825 tree010 = TREE_OPERAND (tree01, 0);
5826 tree011 = TREE_OPERAND (tree01, 1);
5827 STRIP_NOPS (tree010);
5828 STRIP_NOPS (tree011);
5829 if (TREE_CODE (tree010) == INTEGER_CST
5830 && 0 == compare_tree_int (tree010,
5831 TYPE_PRECISION
5832 (TREE_TYPE (TREE_OPERAND
5833 (arg0, 0))))
5834 && operand_equal_p (tree11, tree011, 0))
5835 return build ((code0 != LSHIFT_EXPR
5836 ? LROTATE_EXPR
5837 : RROTATE_EXPR),
5838 type, TREE_OPERAND (arg0, 0), tree11);
5839 }
5840 }
5841 }
5842
5843 associate:
5844 /* In most languages, we can't associate operations on floats across
5845 parentheses. Rather than remember where the parentheses were, we
5846 don't associate floats at all, unless the user has specified
5847 -funsafe-math-optimizations. */
5848
5849 if (! wins
5850 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
5851 {
5852 tree var0, con0, lit0, minus_lit0;
5853 tree var1, con1, lit1, minus_lit1;
5854
5855 /* Split both trees into variables, constants, and literals. Then
5856 associate each group together, the constants with literals,
5857 then the result with variables. This increases the chances of
5858 literals being recombined later and of generating relocatable
5859 expressions for the sum of a constant and literal. */
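 /* A small worked example (hypothetical): in (x + 5) - (y - 3) the
 variable parts x and y and the literals 5 and 3 are regrouped,
 giving (x - y) + 8. */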
5860 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
5861 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
5862 code == MINUS_EXPR);
5863
5864 /* Only do something if we found more than two objects. Otherwise,
5865 nothing has changed and we risk infinite recursion. */
5866 if (2 < ((var0 != 0) + (var1 != 0)
5867 + (con0 != 0) + (con1 != 0)
5868 + (lit0 != 0) + (lit1 != 0)
5869 + (minus_lit0 != 0) + (minus_lit1 != 0)))
5870 {
5871 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
5872 if (code == MINUS_EXPR)
5873 code = PLUS_EXPR;
5874
5875 var0 = associate_trees (var0, var1, code, type);
5876 con0 = associate_trees (con0, con1, code, type);
5877 lit0 = associate_trees (lit0, lit1, code, type);
5878 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
5879
5880 /* Preserve the MINUS_EXPR if the negative part of the literal is
5881 greater than the positive part. Otherwise, the multiplicative
5882 folding code (i.e. extract_muldiv) may be fooled when
5883 unsigned constants are subtracted, as in the following
5884 example: ((X*2 + 4) - 8U)/2. */
5885 if (minus_lit0 && lit0)
5886 {
5887 if (tree_int_cst_lt (lit0, minus_lit0))
5888 {
5889 minus_lit0 = associate_trees (minus_lit0, lit0,
5890 MINUS_EXPR, type);
5891 lit0 = 0;
5892 }
5893 else
5894 {
5895 lit0 = associate_trees (lit0, minus_lit0,
5896 MINUS_EXPR, type);
5897 minus_lit0 = 0;
5898 }
5899 }
5900 if (minus_lit0)
5901 {
5902 if (con0 == 0)
5903 return convert (type, associate_trees (var0, minus_lit0,
5904 MINUS_EXPR, type));
5905 else
5906 {
5907 con0 = associate_trees (con0, minus_lit0,
5908 MINUS_EXPR, type);
5909 return convert (type, associate_trees (var0, con0,
5910 PLUS_EXPR, type));
5911 }
5912 }
5913
5914 con0 = associate_trees (con0, lit0, code, type);
5915 return convert (type, associate_trees (var0, con0, code, type));
5916 }
5917 }
5918
5919 binary:
5920 if (wins)
5921 t1 = const_binop (code, arg0, arg1, 0);
5922 if (t1 != NULL_TREE)
5923 {
5924 /* The return value should always have
5925 the same type as the original expression. */
5926 if (TREE_TYPE (t1) != TREE_TYPE (t))
5927 t1 = convert (TREE_TYPE (t), t1);
5928
5929 return t1;
5930 }
5931 return t;
5932
5933 case MINUS_EXPR:
5934 /* A - (-B) -> A + B */
5935 if (TREE_CODE (arg1) == NEGATE_EXPR)
5936 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5937 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
5938 if (TREE_CODE (arg0) == NEGATE_EXPR
5939 && (FLOAT_TYPE_P (type)
5940 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
5941 && negate_expr_p (arg1)
5942 && (! TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
5943 && (! TREE_SIDE_EFFECTS (arg1) || TREE_CONSTANT (arg0)))
5944 return fold (build (MINUS_EXPR, type, negate_expr (arg1),
5945 TREE_OPERAND (arg0, 0)));
5946
5947 if (! FLOAT_TYPE_P (type))
5948 {
5949 if (! wins && integer_zerop (arg0))
5950 return negate_expr (convert (type, arg1));
5951 if (integer_zerop (arg1))
5952 return non_lvalue (convert (type, arg0));
5953
5954 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
5955 about the case where C is a constant, just try the one pairing
5956 where the second operands match.
5957
5958 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
5959 && operand_equal_p (TREE_OPERAND (arg0, 1),
5960 TREE_OPERAND (arg1, 1), 0))
5961 return fold (build (MULT_EXPR, type,
5962 fold (build (MINUS_EXPR, type,
5963 TREE_OPERAND (arg0, 0),
5964 TREE_OPERAND (arg1, 0))),
5965 TREE_OPERAND (arg0, 1)));
5966
5967 /* Fold A - (A & B) into ~B & A. */
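 /* E.g. (hypothetical): x - (x & mask) becomes ~mask & x, i.e. it
 keeps exactly the bits of x that the mask cleared. */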
5968 if (!TREE_SIDE_EFFECTS (arg0)
5969 && TREE_CODE (arg1) == BIT_AND_EXPR)
5970 {
5971 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
5972 return fold (build (BIT_AND_EXPR, type,
5973 fold (build1 (BIT_NOT_EXPR, type,
5974 TREE_OPERAND (arg1, 0))),
5975 arg0));
5976 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
5977 return fold (build (BIT_AND_EXPR, type,
5978 fold (build1 (BIT_NOT_EXPR, type,
5979 TREE_OPERAND (arg1, 1))),
5980 arg0));
5981 }
5982 }
5983
5984 /* See if ARG1 is zero and X - ARG1 reduces to X. */
5985 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
5986 return non_lvalue (convert (type, arg0));
5987
5988 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
5989 ARG0 is zero and X + ARG0 reduces to X, since that would mean
5990 (-ARG1 + ARG0) reduces to -ARG1. */
5991 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
5992 return negate_expr (convert (type, arg1));
5993
5994 /* Fold &x - &x. This can happen from &x.foo - &x.
5995 This is unsafe for certain floats even in non-IEEE formats.
5996 In IEEE, it is unsafe because it gives the wrong result for NaNs.
5997 Also note that operand_equal_p is always false if an operand
5998 is volatile. */
5999
6000 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
6001 && operand_equal_p (arg0, arg1, 0))
6002 return convert (type, integer_zero_node);
6003
6004 goto associate;
6005
6006 case MULT_EXPR:
6007 /* (-A) * (-B) -> A * B */
6008 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6009 return fold (build (MULT_EXPR, type,
6010 TREE_OPERAND (arg0, 0),
6011 negate_expr (arg1)));
6012 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6013 return fold (build (MULT_EXPR, type,
6014 negate_expr (arg0),
6015 TREE_OPERAND (arg1, 0)));
6016
6017 if (! FLOAT_TYPE_P (type))
6018 {
6019 if (integer_zerop (arg1))
6020 return omit_one_operand (type, arg1, arg0);
6021 if (integer_onep (arg1))
6022 return non_lvalue (convert (type, arg0));
6023
6024 /* (a * (1 << b)) is (a << b) */
6025 if (TREE_CODE (arg1) == LSHIFT_EXPR
6026 && integer_onep (TREE_OPERAND (arg1, 0)))
6027 return fold (build (LSHIFT_EXPR, type, arg0,
6028 TREE_OPERAND (arg1, 1)));
6029 if (TREE_CODE (arg0) == LSHIFT_EXPR
6030 && integer_onep (TREE_OPERAND (arg0, 0)))
6031 return fold (build (LSHIFT_EXPR, type, arg1,
6032 TREE_OPERAND (arg0, 1)));
6033
6034 if (TREE_CODE (arg1) == INTEGER_CST
6035 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
6036 convert (type, arg1),
6037 code, NULL_TREE)))
6038 return convert (type, tem);
6039
6040 }
6041 else
6042 {
6043 /* Maybe fold x * 0 to 0. The expressions aren't the same
6044 when x is NaN, since x * 0 is also NaN. Nor are they the
6045 same in modes with signed zeros, since multiplying a
6046 negative value by 0 gives -0, not +0. */
6047 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
6048 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
6049 && real_zerop (arg1))
6050 return omit_one_operand (type, arg1, arg0);
6051 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
6052 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6053 && real_onep (arg1))
6054 return non_lvalue (convert (type, arg0));
6055
6056 /* Transform x * -1.0 into -x. */
6057 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6058 && real_minus_onep (arg1))
6059 return fold (build1 (NEGATE_EXPR, type, arg0));
6060
6061 if (flag_unsafe_math_optimizations)
6062 {
6063 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6064 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6065
6066 /* Optimizations of sqrt(...)*sqrt(...). */
6067 if ((fcode0 == BUILT_IN_SQRT && fcode1 == BUILT_IN_SQRT)
6068 || (fcode0 == BUILT_IN_SQRTF && fcode1 == BUILT_IN_SQRTF)
6069 || (fcode0 == BUILT_IN_SQRTL && fcode1 == BUILT_IN_SQRTL))
6070 {
6071 tree sqrtfn, arg, arglist;
6072 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6073 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6074
6075 /* Optimize sqrt(x)*sqrt(x) as x. */
6076 if (operand_equal_p (arg00, arg10, 0)
6077 && ! HONOR_SNANS (TYPE_MODE (type)))
6078 return arg00;
6079
6080 /* Optimize sqrt(x)*sqrt(y) as sqrt(x*y). */
6081 sqrtfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6082 arg = fold (build (MULT_EXPR, type, arg00, arg10));
6083 arglist = build_tree_list (NULL_TREE, arg);
6084 return build_function_call_expr (sqrtfn, arglist);
6085 }
6086
6087 /* Optimize exp(x)*exp(y) as exp(x+y). */
6088 if ((fcode0 == BUILT_IN_EXP && fcode1 == BUILT_IN_EXP)
6089 || (fcode0 == BUILT_IN_EXPF && fcode1 == BUILT_IN_EXPF)
6090 || (fcode0 == BUILT_IN_EXPL && fcode1 == BUILT_IN_EXPL))
6091 {
6092 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6093 tree arg = build (PLUS_EXPR, type,
6094 TREE_VALUE (TREE_OPERAND (arg0, 1)),
6095 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6096 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6097 return build_function_call_expr (expfn, arglist);
6098 }
6099
6100 /* Optimizations of pow(...)*pow(...). */
6101 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
6102 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
6103 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
6104 {
6105 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6106 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6107 1)));
6108 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6109 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6110 1)));
6111
6112 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
6113 if (operand_equal_p (arg01, arg11, 0))
6114 {
6115 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6116 tree arg = build (MULT_EXPR, type, arg00, arg10);
6117 tree arglist = tree_cons (NULL_TREE, fold (arg),
6118 build_tree_list (NULL_TREE,
6119 arg01));
6120 return build_function_call_expr (powfn, arglist);
6121 }
6122
6123 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
6124 if (operand_equal_p (arg00, arg10, 0))
6125 {
6126 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6127 tree arg = fold (build (PLUS_EXPR, type, arg01, arg11));
6128 tree arglist = tree_cons (NULL_TREE, arg00,
6129 build_tree_list (NULL_TREE,
6130 arg));
6131 return build_function_call_expr (powfn, arglist);
6132 }
6133 }
6134
6135 /* Optimize tan(x)*cos(x) as sin(x). */
6136 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
6137 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
6138 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
6139 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
6140 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
6141 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
6142 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6143 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6144 {
6145 tree sinfn;
6146
6147 switch (fcode0)
6148 {
6149 case BUILT_IN_TAN:
6150 case BUILT_IN_COS:
6151 sinfn = implicit_built_in_decls[BUILT_IN_SIN];
6152 break;
6153 case BUILT_IN_TANF:
6154 case BUILT_IN_COSF:
6155 sinfn = implicit_built_in_decls[BUILT_IN_SINF];
6156 break;
6157 case BUILT_IN_TANL:
6158 case BUILT_IN_COSL:
6159 sinfn = implicit_built_in_decls[BUILT_IN_SINL];
6160 break;
6161 default:
6162 sinfn = NULL_TREE;
6163 }
6164
6165 if (sinfn != NULL_TREE)
6166 return build_function_call_expr (sinfn,
6167 TREE_OPERAND (arg0, 1));
6168 }
6169
6170 /* Optimize x*pow(x,c) as pow(x,c+1). */
6171 if (fcode1 == BUILT_IN_POW
6172 || fcode1 == BUILT_IN_POWF
6173 || fcode1 == BUILT_IN_POWL)
6174 {
6175 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6176 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6177 1)));
6178 if (TREE_CODE (arg11) == REAL_CST
6179 && ! TREE_CONSTANT_OVERFLOW (arg11)
6180 && operand_equal_p (arg0, arg10, 0))
6181 {
6182 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6183 REAL_VALUE_TYPE c;
6184 tree arg, arglist;
6185
6186 c = TREE_REAL_CST (arg11);
6187 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6188 arg = build_real (type, c);
6189 arglist = build_tree_list (NULL_TREE, arg);
6190 arglist = tree_cons (NULL_TREE, arg0, arglist);
6191 return build_function_call_expr (powfn, arglist);
6192 }
6193 }
6194
6195 /* Optimize pow(x,c)*x as pow(x,c+1). */
6196 if (fcode0 == BUILT_IN_POW
6197 || fcode0 == BUILT_IN_POWF
6198 || fcode0 == BUILT_IN_POWL)
6199 {
6200 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6201 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6202 1)));
6203 if (TREE_CODE (arg01) == REAL_CST
6204 && ! TREE_CONSTANT_OVERFLOW (arg01)
6205 && operand_equal_p (arg1, arg00, 0))
6206 {
6207 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6208 REAL_VALUE_TYPE c;
6209 tree arg, arglist;
6210
6211 c = TREE_REAL_CST (arg01);
6212 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6213 arg = build_real (type, c);
6214 arglist = build_tree_list (NULL_TREE, arg);
6215 arglist = tree_cons (NULL_TREE, arg1, arglist);
6216 return build_function_call_expr (powfn, arglist);
6217 }
6218 }
6219
6220 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
6221 if (! optimize_size
6222 && operand_equal_p (arg0, arg1, 0))
6223 {
6224 tree powfn;
6225
6226 if (type == double_type_node)
6227 powfn = implicit_built_in_decls[BUILT_IN_POW];
6228 else if (type == float_type_node)
6229 powfn = implicit_built_in_decls[BUILT_IN_POWF];
6230 else if (type == long_double_type_node)
6231 powfn = implicit_built_in_decls[BUILT_IN_POWL];
6232 else
6233 powfn = NULL_TREE;
6234
6235 if (powfn)
6236 {
6237 tree arg = build_real (type, dconst2);
6238 tree arglist = build_tree_list (NULL_TREE, arg);
6239 arglist = tree_cons (NULL_TREE, arg0, arglist);
6240 return build_function_call_expr (powfn, arglist);
6241 }
6242 }
6243 }
6244 }
6245 goto associate;
6246
6247 case BIT_IOR_EXPR:
6248 bit_ior:
6249 if (integer_all_onesp (arg1))
6250 return omit_one_operand (type, arg1, arg0);
6251 if (integer_zerop (arg1))
6252 return non_lvalue (convert (type, arg0));
6253 t1 = distribute_bit_expr (code, type, arg0, arg1);
6254 if (t1 != NULL_TREE)
6255 return t1;
6256
6257 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
6258
6259 This results in more efficient code for machines without a NAND
6260 instruction. Combine will canonicalize to the first form
6261 which will allow use of NAND instructions provided by the
6262 backend if they exist. */
6263 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6264 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6265 {
6266 return fold (build1 (BIT_NOT_EXPR, type,
6267 build (BIT_AND_EXPR, type,
6268 TREE_OPERAND (arg0, 0),
6269 TREE_OPERAND (arg1, 0))));
6270 }
6271
6272 /* See if this can be simplified into a rotate first. If that
6273 is unsuccessful continue in the association code. */
6274 goto bit_rotate;
6275
6276 case BIT_XOR_EXPR:
6277 if (integer_zerop (arg1))
6278 return non_lvalue (convert (type, arg0));
6279 if (integer_all_onesp (arg1))
6280 return fold (build1 (BIT_NOT_EXPR, type, arg0));
6281
6282 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
6283 with a constant, and the two constants have no bits in common,
6284 we should treat this as a BIT_IOR_EXPR since this may produce more
6285 simplifications. */
6286 if (TREE_CODE (arg0) == BIT_AND_EXPR
6287 && TREE_CODE (arg1) == BIT_AND_EXPR
6288 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6289 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6290 && integer_zerop (const_binop (BIT_AND_EXPR,
6291 TREE_OPERAND (arg0, 1),
6292 TREE_OPERAND (arg1, 1), 0)))
6293 {
6294 code = BIT_IOR_EXPR;
6295 goto bit_ior;
6296 }
6297
6298 /* See if this can be simplified into a rotate first. If that
6299 is unsuccessful continue in the association code. */
6300 goto bit_rotate;
6301
6302 case BIT_AND_EXPR:
6303 bit_and:
6304 if (integer_all_onesp (arg1))
6305 return non_lvalue (convert (type, arg0));
6306 if (integer_zerop (arg1))
6307 return omit_one_operand (type, arg1, arg0);
6308 t1 = distribute_bit_expr (code, type, arg0, arg1);
6309 if (t1 != NULL_TREE)
6310 return t1;
6311 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
6312 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
6313 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6314 {
6315 unsigned int prec
6316 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
6317
6318 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
6319 && (~TREE_INT_CST_LOW (arg1)
6320 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
6321 return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
6322 }
6323
6324 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
6325
6326 This results in more efficient code for machines without a NOR
6327 instruction. Combine will canonicalize to the first form
6328 which will allow use of NOR instructions provided by the
6329 backend if they exist. */
6330 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6331 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6332 {
6333 return fold (build1 (BIT_NOT_EXPR, type,
6334 build (BIT_IOR_EXPR, type,
6335 TREE_OPERAND (arg0, 0),
6336 TREE_OPERAND (arg1, 0))));
6337 }
6338
6339 goto associate;
6340
6341 case BIT_ANDTC_EXPR:
6342 if (integer_all_onesp (arg0))
6343 return non_lvalue (convert (type, arg1));
6344 if (integer_zerop (arg0))
6345 return omit_one_operand (type, arg0, arg1);
6346 if (TREE_CODE (arg1) == INTEGER_CST)
6347 {
6348 arg1 = fold (build1 (BIT_NOT_EXPR, type, arg1));
6349 code = BIT_AND_EXPR;
6350 goto bit_and;
6351 }
6352 goto binary;
6353
6354 case RDIV_EXPR:
6355 /* Don't touch a floating-point divide by zero unless the mode
6356 of the constant can represent infinity. */
6357 if (TREE_CODE (arg1) == REAL_CST
6358 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
6359 && real_zerop (arg1))
6360 return t;
6361
6362 /* (-A) / (-B) -> A / B */
6363 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6364 return fold (build (RDIV_EXPR, type,
6365 TREE_OPERAND (arg0, 0),
6366 negate_expr (arg1)));
6367 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6368 return fold (build (RDIV_EXPR, type,
6369 negate_expr (arg0),
6370 TREE_OPERAND (arg1, 0)));
6371
6372 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
6373 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6374 && real_onep (arg1))
6375 return non_lvalue (convert (type, arg0));
6376
6377 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
6378 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6379 && real_minus_onep (arg1))
6380 return non_lvalue (convert (type, negate_expr (arg0)));
6381
6382 /* If ARG1 is a constant, we can convert this to a multiply by the
6383 reciprocal. This does not have the same rounding properties,
6384 so only do this if -funsafe-math-optimizations. We can actually
6385 always safely do it if ARG1 is a power of two, but it's hard to
6386 tell if it is or not in a portable manner. */
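 /* Hypothetical example: x / 2.0 has the exact reciprocal 0.5, so the
 exact_real_inverse path below rewrites it as x * 0.5 whenever
 optimizing; x / 3.0 has no exact reciprocal and is rewritten as
 x * (1.0/3.0) only with -funsafe-math-optimizations. */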
6387 if (TREE_CODE (arg1) == REAL_CST)
6388 {
6389 if (flag_unsafe_math_optimizations
6390 && 0 != (tem = const_binop (code, build_real (type, dconst1),
6391 arg1, 0)))
6392 return fold (build (MULT_EXPR, type, arg0, tem));
6393 /* Find the reciprocal if optimizing and the result is exact. */
6394 else if (optimize)
6395 {
6396 REAL_VALUE_TYPE r;
6397 r = TREE_REAL_CST (arg1);
6398 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
6399 {
6400 tem = build_real (type, r);
6401 return fold (build (MULT_EXPR, type, arg0, tem));
6402 }
6403 }
6404 }
6405 /* Convert A/B/C to A/(B*C). */
6406 if (flag_unsafe_math_optimizations
6407 && TREE_CODE (arg0) == RDIV_EXPR)
6408 {
6409 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
6410 build (MULT_EXPR, type, TREE_OPERAND (arg0, 1),
6411 arg1)));
6412 }
6413 /* Convert A/(B/C) to (A/B)*C. */
6414 if (flag_unsafe_math_optimizations
6415 && TREE_CODE (arg1) == RDIV_EXPR)
6416 {
6417 return fold (build (MULT_EXPR, type,
6418 build (RDIV_EXPR, type, arg0,
6419 TREE_OPERAND (arg1, 0)),
6420 TREE_OPERAND (arg1, 1)));
6421 }
6422
6423 if (flag_unsafe_math_optimizations)
6424 {
6425 enum built_in_function fcode = builtin_mathfn_code (arg1);
6426 /* Optimize x/exp(y) into x*exp(-y). */
6427 if (fcode == BUILT_IN_EXP
6428 || fcode == BUILT_IN_EXPF
6429 || fcode == BUILT_IN_EXPL)
6430 {
6431 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6432 tree arg = build1 (NEGATE_EXPR, type,
6433 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6434 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6435 arg1 = build_function_call_expr (expfn, arglist);
6436 return fold (build (MULT_EXPR, type, arg0, arg1));
6437 }
6438
6439 /* Optimize x/pow(y,z) into x*pow(y,-z). */
6440 if (fcode == BUILT_IN_POW
6441 || fcode == BUILT_IN_POWF
6442 || fcode == BUILT_IN_POWL)
6443 {
6444 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6445 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6446 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
6447 tree neg11 = fold (build1 (NEGATE_EXPR, type, arg11));
6448 tree arglist = tree_cons (NULL_TREE, arg10,
6449 build_tree_list (NULL_TREE, neg11));
6450 arg1 = build_function_call_expr (powfn, arglist);
6451 return fold (build (MULT_EXPR, type, arg0, arg1));
6452 }
6453 }
6454
6455 if (flag_unsafe_math_optimizations)
6456 {
6457 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6458 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6459
6460 /* Optimize sin(x)/cos(x) as tan(x). */
6461 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
6462 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
6463 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
6464 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6465 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6466 {
6467 tree tanfn;
6468
6469 if (fcode0 == BUILT_IN_SIN)
6470 tanfn = implicit_built_in_decls[BUILT_IN_TAN];
6471 else if (fcode0 == BUILT_IN_SINF)
6472 tanfn = implicit_built_in_decls[BUILT_IN_TANF];
6473 else if (fcode0 == BUILT_IN_SINL)
6474 tanfn = implicit_built_in_decls[BUILT_IN_TANL];
6475 else
6476 tanfn = NULL_TREE;
6477
6478 if (tanfn != NULL_TREE)
6479 return build_function_call_expr (tanfn,
6480 TREE_OPERAND (arg0, 1));
6481 }
6482
6483 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
6484 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
6485 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
6486 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
6487 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6488 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6489 {
6490 tree tanfn;
6491
6492 if (fcode0 == BUILT_IN_COS)
6493 tanfn = implicit_built_in_decls[BUILT_IN_TAN];
6494 else if (fcode0 == BUILT_IN_COSF)
6495 tanfn = implicit_built_in_decls[BUILT_IN_TANF];
6496 else if (fcode0 == BUILT_IN_COSL)
6497 tanfn = implicit_built_in_decls[BUILT_IN_TANL];
6498 else
6499 tanfn = NULL_TREE;
6500
6501 if (tanfn != NULL_TREE)
6502 {
6503 tree tmp = TREE_OPERAND (arg0, 1);
6504 tmp = build_function_call_expr (tanfn, tmp);
6505 return fold (build (RDIV_EXPR, type,
6506 build_real (type, dconst1),
6507 tmp));
6508 }
6509 }
6510
6511 /* Optimize pow(x,c)/x as pow(x,c-1). */
6512 if (fcode0 == BUILT_IN_POW
6513 || fcode0 == BUILT_IN_POWF
6514 || fcode0 == BUILT_IN_POWL)
6515 {
6516 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6517 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
6518 if (TREE_CODE (arg01) == REAL_CST
6519 && ! TREE_CONSTANT_OVERFLOW (arg01)
6520 && operand_equal_p (arg1, arg00, 0))
6521 {
6522 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6523 REAL_VALUE_TYPE c;
6524 tree arg, arglist;
6525
6526 c = TREE_REAL_CST (arg01);
6527 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
6528 arg = build_real (type, c);
6529 arglist = build_tree_list (NULL_TREE, arg);
6530 arglist = tree_cons (NULL_TREE, arg1, arglist);
6531 return build_function_call_expr (powfn, arglist);
6532 }
6533 }
6534 }
6535 goto binary;
6536
6537 case TRUNC_DIV_EXPR:
6538 case ROUND_DIV_EXPR:
6539 case FLOOR_DIV_EXPR:
6540 case CEIL_DIV_EXPR:
6541 case EXACT_DIV_EXPR:
6542 if (integer_onep (arg1))
6543 return non_lvalue (convert (type, arg0));
6544 if (integer_zerop (arg1))
6545 return t;
6546
6547 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
6548 operation, EXACT_DIV_EXPR.
6549
6550 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
6551 At one time the others generated faster code, but it's not clear whether
6552 they still do after the last round of changes to the DIV code in expmed.c. */
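 /* Hypothetical example: in (n * 8 + 24) / 8 the dividend is provably
 a multiple of 8, so a CEIL_DIV_EXPR or FLOOR_DIV_EXPR here is
 rewritten as the cheaper EXACT_DIV_EXPR. */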
6553 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
6554 && multiple_of_p (type, arg0, arg1))
6555 return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));
6556
6557 if (TREE_CODE (arg1) == INTEGER_CST
6558 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6559 code, NULL_TREE)))
6560 return convert (type, tem);
6561
6562 goto binary;
6563
6564 case CEIL_MOD_EXPR:
6565 case FLOOR_MOD_EXPR:
6566 case ROUND_MOD_EXPR:
6567 case TRUNC_MOD_EXPR:
6568 if (integer_onep (arg1))
6569 return omit_one_operand (type, integer_zero_node, arg0);
6570 if (integer_zerop (arg1))
6571 return t;
6572
6573 if (TREE_CODE (arg1) == INTEGER_CST
6574 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6575 code, NULL_TREE)))
6576 return convert (type, tem);
6577
6578 goto binary;
6579
6580 case LROTATE_EXPR:
6581 case RROTATE_EXPR:
6582 if (integer_all_onesp (arg0))
6583 return omit_one_operand (type, arg0, arg1);
6584 goto shift;
6585
6586 case RSHIFT_EXPR:
6587 /* Optimize -1 >> x for arithmetic right shifts. */
6588 if (integer_all_onesp (arg0) && ! TREE_UNSIGNED (type))
6589 return omit_one_operand (type, arg0, arg1);
6590 /* ... fall through ... */
6591
6592 case LSHIFT_EXPR:
6593 shift:
6594 if (integer_zerop (arg1))
6595 return non_lvalue (convert (type, arg0));
6596 if (integer_zerop (arg0))
6597 return omit_one_operand (type, arg0, arg1);
6598
6599 /* Since a negative shift count is not well-defined,
6600 don't try to compute it in the compiler. */
6601 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
6602 return t;
6603 /* Rewrite an LROTATE_EXPR by a constant into an
6604 RROTATE_EXPR by a new constant. */
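 /* Hypothetical example: in a 32-bit mode, a rotate left by 3 becomes
 a rotate right by 32 - 3 = 29, so only one canonical rotate code
 reaches later passes. */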
6605 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
6606 {
6607 if (t == orig_t)
6608 t = copy_node (t);
6609 TREE_SET_CODE (t, RROTATE_EXPR);
6610 code = RROTATE_EXPR;
6611 TREE_OPERAND (t, 1) = arg1
6612 = const_binop
6613 (MINUS_EXPR,
6614 convert (TREE_TYPE (arg1),
6615 build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0)),
6616 arg1, 0);
6617 if (tree_int_cst_sgn (arg1) < 0)
6618 return t;
6619 }
6620
6621 /* If we have a rotate of a bit operation with the rotate count and
6622 the second operand of the bit operation both constant,
6623 permute the two operations. */
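 /* Hypothetical example: ((x & 0xff00) rotated right by 8) becomes
 (x rotated right by 8) & (0xff00 rotated right by 8), letting the
 constant rotate fold away; rotation distributes over bitwise
 operations. */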
6624 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6625 && (TREE_CODE (arg0) == BIT_AND_EXPR
6626 || TREE_CODE (arg0) == BIT_ANDTC_EXPR
6627 || TREE_CODE (arg0) == BIT_IOR_EXPR
6628 || TREE_CODE (arg0) == BIT_XOR_EXPR)
6629 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
6630 return fold (build (TREE_CODE (arg0), type,
6631 fold (build (code, type,
6632 TREE_OPERAND (arg0, 0), arg1)),
6633 fold (build (code, type,
6634 TREE_OPERAND (arg0, 1), arg1))));
6635
6636 /* Two consecutive rotates adding up to the width of the mode can
6637 be ignored. */
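 /* Hypothetical example: in a 32-bit mode, rotating x right by 10 and
 then right by 22 rotates by 32 bits in total, which is the identity,
 so x itself is returned. */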
6638 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6639 && TREE_CODE (arg0) == RROTATE_EXPR
6640 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6641 && TREE_INT_CST_HIGH (arg1) == 0
6642 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
6643 && ((TREE_INT_CST_LOW (arg1)
6644 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
6645 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
6646 return TREE_OPERAND (arg0, 0);
6647
6648 goto binary;
6649
6650 case MIN_EXPR:
6651 if (operand_equal_p (arg0, arg1, 0))
6652 return omit_one_operand (type, arg0, arg1);
6653 if (INTEGRAL_TYPE_P (type)
6654 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
6655 return omit_one_operand (type, arg1, arg0);
6656 goto associate;
6657
6658 case MAX_EXPR:
6659 if (operand_equal_p (arg0, arg1, 0))
6660 return omit_one_operand (type, arg0, arg1);
6661 if (INTEGRAL_TYPE_P (type)
6662 && TYPE_MAX_VALUE (type)
6663 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
6664 return omit_one_operand (type, arg1, arg0);
6665 goto associate;
6666
6667 case TRUTH_NOT_EXPR:
6668 /* Note that the operand of this must be an int
6669 and its value must be 0 or 1.
6670 ("true" is a fixed value perhaps depending on the language,
6671 but we don't handle values other than 1 correctly yet.) */
6672 tem = invert_truthvalue (arg0);
6673 /* Avoid infinite recursion. */
6674 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
6675 {
6676 tem = fold_single_bit_test (code, arg0, arg1, type);
6677 if (tem)
6678 return tem;
6679 return t;
6680 }
6681 return convert (type, tem);
6682
6683 case TRUTH_ANDIF_EXPR:
6684 /* Note that the operands of this must be ints
6685 and their values must be 0 or 1.
6686 ("true" is a fixed value perhaps depending on the language.) */
6687 /* If first arg is constant zero, return it. */
6688 if (integer_zerop (arg0))
6689 return convert (type, arg0);
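 /* ... fall through ... */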
6690 case TRUTH_AND_EXPR:
6691 /* If either arg is constant true, drop it. */
6692 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6693 return non_lvalue (convert (type, arg1));
6694 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
6695 /* Preserve sequence points. */
6696 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
6697 return non_lvalue (convert (type, arg0));
6698 /* If second arg is constant zero, result is zero, but first arg
6699 must be evaluated. */
6700 if (integer_zerop (arg1))
6701 return omit_one_operand (type, arg1, arg0);
6702 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
6703 case will be handled here. */
6704 if (integer_zerop (arg0))
6705 return omit_one_operand (type, arg0, arg1);
6706
6707 truth_andor:
6708 /* We only do these simplifications if we are optimizing. */
6709 if (!optimize)
6710 return t;
6711
6712 /* Check for things like (A || B) && (A || C). We can convert this
6713 to A || (B && C). Note that either operator can be any of the four
6714 truth and/or operations and the transformation will still be
6715 valid. Also note that we only care about order for the
6716 ANDIF and ORIF operators. If B contains side effects, this
6717 might change the truth-value of A. */
6718 if (TREE_CODE (arg0) == TREE_CODE (arg1)
6719 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
6720 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
6721 || TREE_CODE (arg0) == TRUTH_AND_EXPR
6722 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
6723 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
6724 {
6725 tree a00 = TREE_OPERAND (arg0, 0);
6726 tree a01 = TREE_OPERAND (arg0, 1);
6727 tree a10 = TREE_OPERAND (arg1, 0);
6728 tree a11 = TREE_OPERAND (arg1, 1);
6729 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
6730 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
6731 && (code == TRUTH_AND_EXPR
6732 || code == TRUTH_OR_EXPR));
6733
6734 if (operand_equal_p (a00, a10, 0))
6735 return fold (build (TREE_CODE (arg0), type, a00,
6736 fold (build (code, type, a01, a11))));
6737 else if (commutative && operand_equal_p (a00, a11, 0))
6738 return fold (build (TREE_CODE (arg0), type, a00,
6739 fold (build (code, type, a01, a10))));
6740 else if (commutative && operand_equal_p (a01, a10, 0))
6741 return fold (build (TREE_CODE (arg0), type, a01,
6742 fold (build (code, type, a00, a11))));
6743
6744 /* This case is tricky because we must either have commutative
6745 operators or else A10 must not have side-effects. */
6746
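/* For example (illustrative): (b && a) || (c && a) can become
(b || c) && a, provided the inner operator is commutative or
C has no side effects.  */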
6747 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
6748 && operand_equal_p (a01, a11, 0))
6749 return fold (build (TREE_CODE (arg0), type,
6750 fold (build (code, type, a00, a10)),
6751 a01));
6752 }
6753
6754 /* See if we can build a range comparison. */
6755 if (0 != (tem = fold_range_test (t)))
6756 return tem;
6757
6758 /* Check for the possibility of merging component references. If our
6759 lhs is another similar operation, try to merge its rhs with our
6760 rhs. Then try to merge our lhs and rhs. */
6761 if (TREE_CODE (arg0) == code
6762 && 0 != (tem = fold_truthop (code, type,
6763 TREE_OPERAND (arg0, 1), arg1)))
6764 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6765
6766 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
6767 return tem;
6768
6769 return t;
6770
6771 case TRUTH_ORIF_EXPR:
6772 /* Note that the operands of this must be ints
6773 and their values must be 0 or true.
6774 ("true" is a fixed value perhaps depending on the language.) */
6775 /* If first arg is constant true, return it. */
6776 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6777 return convert (type, arg0);
6778 case TRUTH_OR_EXPR:
6779 /* If either arg is constant zero, drop it. */
6780 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
6781 return non_lvalue (convert (type, arg1));
6782 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
6783 /* Preserve sequence points. */
6784 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
6785 return non_lvalue (convert (type, arg0));
6786 /* If second arg is constant true, result is true, but we must
6787 evaluate first arg. */
6788 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
6789 return omit_one_operand (type, arg1, arg0);
6790 /* Likewise for first arg, but note this only occurs here for
6791 TRUTH_OR_EXPR. */
6792 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6793 return omit_one_operand (type, arg0, arg1);
6794 goto truth_andor;
6795
6796 case TRUTH_XOR_EXPR:
6797 /* If either arg is constant zero, drop it. */
6798 if (integer_zerop (arg0))
6799 return non_lvalue (convert (type, arg1));
6800 if (integer_zerop (arg1))
6801 return non_lvalue (convert (type, arg0));
6802 /* If either arg is constant true, this is a logical inversion. */
6803 if (integer_onep (arg0))
6804 return non_lvalue (convert (type, invert_truthvalue (arg1)));
6805 if (integer_onep (arg1))
6806 return non_lvalue (convert (type, invert_truthvalue (arg0)));
6807 return t;
6808
6809 case EQ_EXPR:
6810 case NE_EXPR:
6811 case LT_EXPR:
6812 case GT_EXPR:
6813 case LE_EXPR:
6814 case GE_EXPR:
6815 /* If one arg is a real or integer constant, put it last. */
6816 if ((TREE_CODE (arg0) == INTEGER_CST
6817 && TREE_CODE (arg1) != INTEGER_CST)
6818 || (TREE_CODE (arg0) == REAL_CST
6819 && TREE_CODE (arg1) != REAL_CST))
6820 {
6821 if (t == orig_t)
6822 t = copy_node (t);
6823 TREE_OPERAND (t, 0) = arg1;
6824 TREE_OPERAND (t, 1) = arg0;
6825 arg0 = TREE_OPERAND (t, 0);
6826 arg1 = TREE_OPERAND (t, 1);
6827 code = swap_tree_comparison (code);
6828 TREE_SET_CODE (t, code);
6829 }
6830
6831 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
6832 {
6833 tree targ0 = strip_float_extensions (arg0);
6834 tree targ1 = strip_float_extensions (arg1);
6835 tree newtype = TREE_TYPE (targ0);
6836
6837 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
6838 newtype = TREE_TYPE (targ1);
6839
6840 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
6841 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
6842 return fold (build (code, type, convert (newtype, targ0),
6843 convert (newtype, targ1)));
6844
6845 /* (-a) CMP (-b) -> b CMP a */
6846 if (TREE_CODE (arg0) == NEGATE_EXPR
6847 && TREE_CODE (arg1) == NEGATE_EXPR)
6848 return fold (build (code, type, TREE_OPERAND (arg1, 0),
6849 TREE_OPERAND (arg0, 0)));
6850
6851 if (TREE_CODE (arg1) == REAL_CST)
6852 {
6853 REAL_VALUE_TYPE cst;
6854 cst = TREE_REAL_CST (arg1);
6855
6856 /* (-a) CMP CST -> a swap(CMP) (-CST) */
6857 if (TREE_CODE (arg0) == NEGATE_EXPR)
6858 return
6859 fold (build (swap_tree_comparison (code), type,
6860 TREE_OPERAND (arg0, 0),
6861 build_real (TREE_TYPE (arg1),
6862 REAL_VALUE_NEGATE (cst))));
6863
6864 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
6865 /* a CMP (-0) -> a CMP 0 */
6866 if (REAL_VALUE_MINUS_ZERO (cst))
6867 return fold (build (code, type, arg0,
6868 build_real (TREE_TYPE (arg1), dconst0)));
6869
6870 /* x != NaN is always true, other ops are always false. */
6871 if (REAL_VALUE_ISNAN (cst)
6872 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
6873 {
6874 t = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
6875 return omit_one_operand (type, convert (type, t), arg0);
6876 }
6877
6878 /* Fold comparisons against infinity. */
6879 if (REAL_VALUE_ISINF (cst))
6880 {
6881 tem = fold_inf_compare (code, type, arg0, arg1);
6882 if (tem != NULL_TREE)
6883 return tem;
6884 }
6885 }
6886
6887 /* If this is a comparison of a real constant with a PLUS_EXPR
6888 or a MINUS_EXPR of a real constant, we can convert it into a
6889 comparison with a revised real constant as long as no overflow
6890 occurs when unsafe_math_optimizations are enabled. */
6891 if (flag_unsafe_math_optimizations
6892 && TREE_CODE (arg1) == REAL_CST
6893 && (TREE_CODE (arg0) == PLUS_EXPR
6894 || TREE_CODE (arg0) == MINUS_EXPR)
6895 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6896 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
6897 ? MINUS_EXPR : PLUS_EXPR,
6898 arg1, TREE_OPERAND (arg0, 1), 0))
6899 && ! TREE_CONSTANT_OVERFLOW (tem))
6900 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6901
6902 /* Likewise, we can simplify a comparison of a real constant with
6903 a MINUS_EXPR whose first operand is also a real constant, i.e.
6904 (c1 - x) < c2 becomes x > c1-c2. */
6905 if (flag_unsafe_math_optimizations
6906 && TREE_CODE (arg1) == REAL_CST
6907 && TREE_CODE (arg0) == MINUS_EXPR
6908 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
6909 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
6910 arg1, 0))
6911 && ! TREE_CONSTANT_OVERFLOW (tem))
6912 return fold (build (swap_tree_comparison (code), type,
6913 TREE_OPERAND (arg0, 1), tem));
6914
6915 /* Fold comparisons against built-in math functions. */
6916 if (TREE_CODE (arg1) == REAL_CST
6917 && flag_unsafe_math_optimizations
6918 && ! flag_errno_math)
6919 {
6920 enum built_in_function fcode = builtin_mathfn_code (arg0);
6921
6922 if (fcode != END_BUILTINS)
6923 {
6924 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
6925 if (tem != NULL_TREE)
6926 return tem;
6927 }
6928 }
6929 }
6930
6931 /* Convert foo++ == CONST into ++foo == CONST + INCR.
6932 First, see if one arg is constant; find the constant arg
6933 and the other one. */
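/* For example (illustrative): i++ == 5 becomes ++i == 6
when the increment is 1.  */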
6934 {
6935 tree constop = 0, varop = NULL_TREE;
6936 int constopnum = -1;
6937
6938 if (TREE_CONSTANT (arg1))
6939 constopnum = 1, constop = arg1, varop = arg0;
6940 if (TREE_CONSTANT (arg0))
6941 constopnum = 0, constop = arg0, varop = arg1;
6942
6943 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
6944 {
6945 /* This optimization is invalid for ordered comparisons
6946 if CONST+INCR overflows or if foo+incr might overflow.
6947 This optimization is invalid for floating point due to rounding.
6948 For pointer types we assume overflow doesn't happen. */
6949 if (POINTER_TYPE_P (TREE_TYPE (varop))
6950 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
6951 && (code == EQ_EXPR || code == NE_EXPR)))
6952 {
6953 tree newconst
6954 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
6955 constop, TREE_OPERAND (varop, 1)));
6956
6957 /* Do not overwrite the current varop to be a preincrement,
6958 create a new node so that we won't confuse our caller who
6959 might create trees and throw them away, reusing the
6960 arguments that they passed to build. This shows up in
6961 the THEN or ELSE parts of ?: being postincrements. */
6962 varop = build (PREINCREMENT_EXPR, TREE_TYPE (varop),
6963 TREE_OPERAND (varop, 0),
6964 TREE_OPERAND (varop, 1));
6965
6966 /* If VAROP is a reference to a bitfield, we must mask
6967 the constant by the width of the field. */
6968 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
6969 && DECL_BIT_FIELD (TREE_OPERAND
6970 (TREE_OPERAND (varop, 0), 1)))
6971 {
6972 int size
6973 = TREE_INT_CST_LOW (DECL_SIZE
6974 (TREE_OPERAND
6975 (TREE_OPERAND (varop, 0), 1)));
6976 tree mask, unsigned_type;
6977 unsigned int precision;
6978 tree folded_compare;
6979
6980 /* First check whether the comparison would come out
6981 always the same. If we don't do that we would
6982 change the meaning with the masking. */
6983 if (constopnum == 0)
6984 folded_compare = fold (build (code, type, constop,
6985 TREE_OPERAND (varop, 0)));
6986 else
6987 folded_compare = fold (build (code, type,
6988 TREE_OPERAND (varop, 0),
6989 constop));
6990 if (integer_zerop (folded_compare)
6991 || integer_onep (folded_compare))
6992 return omit_one_operand (type, folded_compare, varop);
6993
6994 unsigned_type = (*lang_hooks.types.type_for_size) (size, 1);
6995 precision = TYPE_PRECISION (unsigned_type);
6996 mask = build_int_2 (~0, ~0);
6997 TREE_TYPE (mask) = unsigned_type;
6998 force_fit_type (mask, 0);
6999 mask = const_binop (RSHIFT_EXPR, mask,
7000 size_int (precision - size), 0);
7001 newconst = fold (build (BIT_AND_EXPR,
7002 TREE_TYPE (varop), newconst,
7003 convert (TREE_TYPE (varop),
7004 mask)));
7005 }
7006
7007 t = build (code, type,
7008 (constopnum == 0) ? newconst : varop,
7009 (constopnum == 1) ? newconst : varop);
7010 return t;
7011 }
7012 }
7013 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
7014 {
7015 if (POINTER_TYPE_P (TREE_TYPE (varop))
7016 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
7017 && (code == EQ_EXPR || code == NE_EXPR)))
7018 {
7019 tree newconst
7020 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
7021 constop, TREE_OPERAND (varop, 1)));
7022
7023 /* Do not overwrite the current varop to be a predecrement,
7024 create a new node so that we won't confuse our caller who
7025 might create trees and throw them away, reusing the
7026 arguments that they passed to build. This shows up in
7027 the THEN or ELSE parts of ?: being postdecrements. */
7028 varop = build (PREDECREMENT_EXPR, TREE_TYPE (varop),
7029 TREE_OPERAND (varop, 0),
7030 TREE_OPERAND (varop, 1));
7031
7032 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7033 && DECL_BIT_FIELD (TREE_OPERAND
7034 (TREE_OPERAND (varop, 0), 1)))
7035 {
7036 int size
7037 = TREE_INT_CST_LOW (DECL_SIZE
7038 (TREE_OPERAND
7039 (TREE_OPERAND (varop, 0), 1)));
7040 tree mask, unsigned_type;
7041 unsigned int precision;
7042 tree folded_compare;
7043
7044 if (constopnum == 0)
7045 folded_compare = fold (build (code, type, constop,
7046 TREE_OPERAND (varop, 0)));
7047 else
7048 folded_compare = fold (build (code, type,
7049 TREE_OPERAND (varop, 0),
7050 constop));
7051 if (integer_zerop (folded_compare)
7052 || integer_onep (folded_compare))
7053 return omit_one_operand (type, folded_compare, varop);
7054
7055 unsigned_type = (*lang_hooks.types.type_for_size) (size, 1);
7056 precision = TYPE_PRECISION (unsigned_type);
7057 mask = build_int_2 (~0, ~0);
7058 TREE_TYPE (mask) = unsigned_type;
7059 force_fit_type (mask, 0);
7060 mask = const_binop (RSHIFT_EXPR, mask,
7061 size_int (precision - size), 0);
7062 newconst = fold (build (BIT_AND_EXPR,
7063 TREE_TYPE (varop), newconst,
7064 convert (TREE_TYPE (varop),
7065 mask)));
7066 }
7067
7068 t = build (code, type,
7069 (constopnum == 0) ? newconst : varop,
7070 (constopnum == 1) ? newconst : varop);
7071 return t;
7072 }
7073 }
7074 }
7075
7076 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
7077 This transformation affects the cases which are handled in later
7078 optimizations involving comparisons with non-negative constants. */
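/* For example (illustrative): x >= 1 becomes x > 0.  */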
7079 if (TREE_CODE (arg1) == INTEGER_CST
7080 && TREE_CODE (arg0) != INTEGER_CST
7081 && tree_int_cst_sgn (arg1) > 0)
7082 {
7083 switch (code)
7084 {
7085 case GE_EXPR:
7086 code = GT_EXPR;
7087 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7088 t = build (code, type, TREE_OPERAND (t, 0), arg1);
7089 break;
7090
7091 case LT_EXPR:
7092 code = LE_EXPR;
7093 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7094 t = build (code, type, TREE_OPERAND (t, 0), arg1);
7095 break;
7096
7097 default:
7098 break;
7099 }
7100 }
7101
7102 /* Comparisons with the highest or lowest possible integer of
7103 the specified size will have known values. */
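/* For example (illustrative): for an 8-bit unsigned type,
x > 255 is always false and x <= 255 is always true.  */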
7104 {
7105 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
7106
7107 if (TREE_CODE (arg1) == INTEGER_CST
7108 && ! TREE_CONSTANT_OVERFLOW (arg1)
7109 && width <= HOST_BITS_PER_WIDE_INT
7110 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
7111 || POINTER_TYPE_P (TREE_TYPE (arg1))))
7112 {
7113 unsigned HOST_WIDE_INT signed_max;
7114 unsigned HOST_WIDE_INT max, min;
7115
7116 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
7117
7118 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7119 {
7120 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
7121 min = 0;
7122 }
7123 else
7124 {
7125 max = signed_max;
7126 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
7127 }
7128
7129 if (TREE_INT_CST_HIGH (arg1) == 0
7130 && TREE_INT_CST_LOW (arg1) == max)
7131 switch (code)
7132 {
7133 case GT_EXPR:
7134 return omit_one_operand (type,
7135 convert (type, integer_zero_node),
7136 arg0);
7137 case GE_EXPR:
7138 code = EQ_EXPR;
7139 if (t == orig_t)
7140 t = copy_node (t);
7141 TREE_SET_CODE (t, EQ_EXPR);
7142 break;
7143 case LE_EXPR:
7144 return omit_one_operand (type,
7145 convert (type, integer_one_node),
7146 arg0);
7147 case LT_EXPR:
7148 code = NE_EXPR;
7149 if (t == orig_t)
7150 t = copy_node (t);
7151 TREE_SET_CODE (t, NE_EXPR);
7152 break;
7153
7154 /* The GE_EXPR and LT_EXPR cases above are not normally
7155 reached because of previous transformations. */
7156
7157 default:
7158 break;
7159 }
7160 else if (TREE_INT_CST_HIGH (arg1) == 0
7161 && TREE_INT_CST_LOW (arg1) == max - 1)
7162 switch (code)
7163 {
7164 case GT_EXPR:
7165 code = EQ_EXPR;
7166 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7167 t = build (code, type, TREE_OPERAND (t, 0), arg1);
7168 break;
7169 case LE_EXPR:
7170 code = NE_EXPR;
7171 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7172 t = build (code, type, TREE_OPERAND (t, 0), arg1);
7173 break;
7174 default:
7175 break;
7176 }
7177 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7178 && TREE_INT_CST_LOW (arg1) == min)
7179 switch (code)
7180 {
7181 case LT_EXPR:
7182 return omit_one_operand (type,
7183 convert (type, integer_zero_node),
7184 arg0);
7185 case LE_EXPR:
7186 code = EQ_EXPR;
7187 if (t == orig_t)
7188 t = copy_node (t);
7189 TREE_SET_CODE (t, EQ_EXPR);
7190 break;
7191
7192 case GE_EXPR:
7193 return omit_one_operand (type,
7194 convert (type, integer_one_node),
7195 arg0);
7196 case GT_EXPR:
7197 code = NE_EXPR;
7198 if (t == orig_t)
7199 t = copy_node (t);
7200 TREE_SET_CODE (t, NE_EXPR);
7201 break;
7202
7203 default:
7204 break;
7205 }
7206 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7207 && TREE_INT_CST_LOW (arg1) == min + 1)
7208 switch (code)
7209 {
7210 case GE_EXPR:
7211 code = NE_EXPR;
7212 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7213 t = build (code, type, TREE_OPERAND (t, 0), arg1);
7214 break;
7215 case LT_EXPR:
7216 code = EQ_EXPR;
7217 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7218 t = build (code, type, TREE_OPERAND (t, 0), arg1);
7219 break;
7220 default:
7221 break;
7222 }
7223
7224 else if (TREE_INT_CST_HIGH (arg1) == 0
7225 && TREE_INT_CST_LOW (arg1) == signed_max
7226 && TREE_UNSIGNED (TREE_TYPE (arg1))
7227 /* signed_type does not work on pointer types. */
7228 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
7229 {
7230 /* The following case also applies to X < signed_max+1
7231 and X >= signed_max+1 because of previous transformations. */
7232 if (code == LE_EXPR || code == GT_EXPR)
7233 {
7234 tree st0, st1;
7235 st0 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg0));
7236 st1 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg1));
7237 return fold
7238 (build (code == LE_EXPR ? GE_EXPR: LT_EXPR,
7239 type, convert (st0, arg0),
7240 convert (st1, integer_zero_node)));
7241 }
7242 }
7243 }
7244 }
7245
7246 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
7247 a MINUS_EXPR of a constant, we can convert it into a comparison with
7248 a revised constant as long as no overflow occurs. */
7249 if ((code == EQ_EXPR || code == NE_EXPR)
7250 && TREE_CODE (arg1) == INTEGER_CST
7251 && (TREE_CODE (arg0) == PLUS_EXPR
7252 || TREE_CODE (arg0) == MINUS_EXPR)
7253 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7254 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7255 ? MINUS_EXPR : PLUS_EXPR,
7256 arg1, TREE_OPERAND (arg0, 1), 0))
7257 && ! TREE_CONSTANT_OVERFLOW (tem))
7258 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7259
7260 /* Similarly for a NEGATE_EXPR. */
7261 else if ((code == EQ_EXPR || code == NE_EXPR)
7262 && TREE_CODE (arg0) == NEGATE_EXPR
7263 && TREE_CODE (arg1) == INTEGER_CST
7264 && 0 != (tem = negate_expr (arg1))
7265 && TREE_CODE (tem) == INTEGER_CST
7266 && ! TREE_CONSTANT_OVERFLOW (tem))
7267 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7268
7269 /* If we have X - Y == 0, we can convert that to X == Y and similarly
7270 for !=. Don't do this for ordered comparisons due to overflow. */
7271 else if ((code == NE_EXPR || code == EQ_EXPR)
7272 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
7273 return fold (build (code, type,
7274 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
7275
7276 /* If we are widening one operand of an integer comparison,
7277 see if the other operand is similarly being widened. Perhaps we
7278 can do the comparison in the narrower type. */
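/* For example (illustrative): if s1 and s2 are shorts,
(int) s1 == (int) s2 can be done as s1 == s2.  */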
7279 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
7280 && TREE_CODE (arg0) == NOP_EXPR
7281 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
7282 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
7283 && (TREE_TYPE (t1) == TREE_TYPE (tem)
7284 || (TREE_CODE (t1) == INTEGER_CST
7285 && int_fits_type_p (t1, TREE_TYPE (tem)))))
7286 return fold (build (code, type, tem, convert (TREE_TYPE (tem), t1)));
7287
7288 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
7289 constant, we can simplify it. */
7290 else if (TREE_CODE (arg1) == INTEGER_CST
7291 && (TREE_CODE (arg0) == MIN_EXPR
7292 || TREE_CODE (arg0) == MAX_EXPR)
7293 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7294 return optimize_minmax_comparison (t);
7295
7296 /* If we are comparing an ABS_EXPR with a constant, we can
7297 convert all the cases into explicit comparisons, but they may
7298 well not be faster than doing the ABS and one comparison.
7299 But ABS (X) <= C is a range comparison, which becomes a subtraction
7300 and a comparison, and is probably faster. */
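/* For example (illustrative): abs (x) <= 5 becomes
x >= -5 && x <= 5.  */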
7301 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7302 && TREE_CODE (arg0) == ABS_EXPR
7303 && ! TREE_SIDE_EFFECTS (arg0)
7304 && (0 != (tem = negate_expr (arg1)))
7305 && TREE_CODE (tem) == INTEGER_CST
7306 && ! TREE_CONSTANT_OVERFLOW (tem))
7307 return fold (build (TRUTH_ANDIF_EXPR, type,
7308 build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
7309 build (LE_EXPR, type,
7310 TREE_OPERAND (arg0, 0), arg1)));
7311
7312 /* If this is an EQ or NE comparison with zero and ARG0 is
7313 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
7314 two operations, but the latter can be done in one less insn
7315 on machines that have only two-operand insns or on which a
7316 constant cannot be the first operand. */
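/* For example (illustrative): ((1 << n) & x) == 0 becomes
((x >> n) & 1) == 0.  */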
7317 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
7318 && TREE_CODE (arg0) == BIT_AND_EXPR)
7319 {
7320 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
7321 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
7322 return
7323 fold (build (code, type,
7324 build (BIT_AND_EXPR, TREE_TYPE (arg0),
7325 build (RSHIFT_EXPR,
7326 TREE_TYPE (TREE_OPERAND (arg0, 0)),
7327 TREE_OPERAND (arg0, 1),
7328 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
7329 convert (TREE_TYPE (arg0),
7330 integer_one_node)),
7331 arg1));
7332 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
7333 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
7334 return
7335 fold (build (code, type,
7336 build (BIT_AND_EXPR, TREE_TYPE (arg0),
7337 build (RSHIFT_EXPR,
7338 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7339 TREE_OPERAND (arg0, 0),
7340 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
7341 convert (TREE_TYPE (arg0),
7342 integer_one_node)),
7343 arg1));
7344 }
7345
7346 /* If this is an NE or EQ comparison of zero against the result of a
7347 signed MOD operation whose second operand is a power of 2, make
7348 the MOD operation unsigned since it is simpler and equivalent. */
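/* For example (illustrative): (x % 4) == 0 with signed x becomes
((unsigned) x % 4U) == 0.  */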
7349 if ((code == NE_EXPR || code == EQ_EXPR)
7350 && integer_zerop (arg1)
7351 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
7352 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
7353 || TREE_CODE (arg0) == CEIL_MOD_EXPR
7354 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
7355 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
7356 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7357 {
7358 tree newtype = (*lang_hooks.types.unsigned_type) (TREE_TYPE (arg0));
7359 tree newmod = build (TREE_CODE (arg0), newtype,
7360 convert (newtype, TREE_OPERAND (arg0, 0)),
7361 convert (newtype, TREE_OPERAND (arg0, 1)));
7362
7363 return build (code, type, newmod, convert (newtype, arg1));
7364 }
7365
7366 /* If this is an NE comparison of zero with an AND of one, remove the
7367 comparison since the AND will give the correct value. */
7368 if (code == NE_EXPR && integer_zerop (arg1)
7369 && TREE_CODE (arg0) == BIT_AND_EXPR
7370 && integer_onep (TREE_OPERAND (arg0, 1)))
7371 return convert (type, arg0);
7372
7373 /* If we have (A & C) == C where C is a power of 2, convert this into
7374 (A & C) != 0. Similarly for NE_EXPR. */
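/* For example (illustrative): (x & 8) == 8 becomes (x & 8) != 0.  */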
7375 if ((code == EQ_EXPR || code == NE_EXPR)
7376 && TREE_CODE (arg0) == BIT_AND_EXPR
7377 && integer_pow2p (TREE_OPERAND (arg0, 1))
7378 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
7379 return fold (build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
7380 arg0, integer_zero_node));
7381
7382 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
7383 2, then fold the expression into shifts and logical operations. */
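/* For example (illustrative): (x & 4) != 0 can become
(x >> 2) & 1.  */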
7384 tem = fold_single_bit_test (code, arg0, arg1, type);
7385 if (tem)
7386 return tem;
7387
7388 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
7389 Similarly for NE_EXPR. */
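/* For example (illustrative): (x & 4) == 3 is always false, since
3 has a bit set outside the mask.  */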
7390 if ((code == EQ_EXPR || code == NE_EXPR)
7391 && TREE_CODE (arg0) == BIT_AND_EXPR
7392 && TREE_CODE (arg1) == INTEGER_CST
7393 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7394 {
7395 tree dandnotc = fold (build (BIT_ANDTC_EXPR, TREE_TYPE (arg0),
7396 arg1, TREE_OPERAND (arg0, 1)));
7397 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7398 if (!integer_zerop (dandnotc))
7399 return omit_one_operand (type, rslt, arg0);
7400 }
7401
7402 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
7403 Similarly for NE_EXPR. */
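/* For example (illustrative): (x | 4) == 3 is always false, since
bit 2 of the left-hand side is always set.  */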
7404 if ((code == EQ_EXPR || code == NE_EXPR)
7405 && TREE_CODE (arg0) == BIT_IOR_EXPR
7406 && TREE_CODE (arg1) == INTEGER_CST
7407 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7408 {
7409 tree candnotd = fold (build (BIT_ANDTC_EXPR, TREE_TYPE (arg0),
7410 TREE_OPERAND (arg0, 1), arg1));
7411 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7412 if (!integer_zerop (candnotd))
7413 return omit_one_operand (type, rslt, arg0);
7414 }
7415
7416 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
7417 and similarly for >= into !=. */
7418 if ((code == LT_EXPR || code == GE_EXPR)
7419 && TREE_UNSIGNED (TREE_TYPE (arg0))
7420 && TREE_CODE (arg1) == LSHIFT_EXPR
7421 && integer_onep (TREE_OPERAND (arg1, 0)))
7422 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7423 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7424 TREE_OPERAND (arg1, 1)),
7425 convert (TREE_TYPE (arg0), integer_zero_node));
7426
7427 else if ((code == LT_EXPR || code == GE_EXPR)
7428 && TREE_UNSIGNED (TREE_TYPE (arg0))
7429 && (TREE_CODE (arg1) == NOP_EXPR
7430 || TREE_CODE (arg1) == CONVERT_EXPR)
7431 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
7432 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
7433 return
7434 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7435 convert (TREE_TYPE (arg0),
7436 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7437 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
7438 convert (TREE_TYPE (arg0), integer_zero_node));
7439
7440 /* Simplify comparison of something with itself. (For IEEE
7441 floating-point, we can only do some of these simplifications.) */
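/* For example (illustrative): x < x folds to 0 even for IEEE
floats, whereas x == x folds to 1 only when NaNs need not be
honored.  */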
7442 if (operand_equal_p (arg0, arg1, 0))
7443 {
7444 switch (code)
7445 {
7446 case EQ_EXPR:
7447 case GE_EXPR:
7448 case LE_EXPR:
7449 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7450 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7451 return constant_boolean_node (1, type);
7452 code = EQ_EXPR;
7453 if (t == orig_t)
7454 t = copy_node (t);
7455 TREE_SET_CODE (t, code);
7456 break;
7457
7458 case NE_EXPR:
7459 /* For NE, we can only do this simplification if the operands
7460 are integral or we don't honor IEEE floating point NaNs. */
7461 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
7462 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7463 break;
7464 /* ... fall through ... */
7465 case GT_EXPR:
7466 case LT_EXPR:
7467 return constant_boolean_node (0, type);
7468 default:
7469 abort ();
7470 }
7471 }
7472
7473 /* If we are comparing an expression that just has comparisons
7474 of two integer values, arithmetic expressions of those comparisons,
7475 and constants, we can simplify it. There are only three cases
7476 to check: the two values can either be equal, the first can be
7477 greater, or the second can be greater. Fold the expression for
7478 those three values. Since each value must be 0 or 1, we have
7479 eight possibilities, each of which corresponds to the constant 0
7480 or 1 or one of the six possible comparisons.
7481
7482 This handles common cases like (a > b) == 0 but also handles
7483 expressions like ((x > y) - (y > x)) > 0, which supposedly
7484 occur in macroized code. */
7485
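/* For example (illustrative): for (a > b) == 0 the greater, equal
and less cases give 0, 1 and 1, which the switch below maps
back to a <= b.  */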
7486 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
7487 {
7488 tree cval1 = 0, cval2 = 0;
7489 int save_p = 0;
7490
7491 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
7492 /* Don't handle degenerate cases here; they should already
7493 have been handled anyway. */
7494 && cval1 != 0 && cval2 != 0
7495 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
7496 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
7497 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
7498 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
7499 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
7500 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
7501 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
7502 {
7503 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
7504 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
7505
7506 /* We can't just pass T to eval_subst in case cval1 or cval2
7507 was the same as ARG1. */
7508
7509 tree high_result
7510 = fold (build (code, type,
7511 eval_subst (arg0, cval1, maxval, cval2, minval),
7512 arg1));
7513 tree equal_result
7514 = fold (build (code, type,
7515 eval_subst (arg0, cval1, maxval, cval2, maxval),
7516 arg1));
7517 tree low_result
7518 = fold (build (code, type,
7519 eval_subst (arg0, cval1, minval, cval2, maxval),
7520 arg1));
7521
7522 /* All three of these results should be 0 or 1. Confirm they
7523 are. Then use those values to select the proper code
7524 to use. */
7525
7526 if ((integer_zerop (high_result)
7527 || integer_onep (high_result))
7528 && (integer_zerop (equal_result)
7529 || integer_onep (equal_result))
7530 && (integer_zerop (low_result)
7531 || integer_onep (low_result)))
7532 {
7533 /* Make a 3-bit mask with the high-order bit being the
7534 value for `>', the next for `=', and the low for `<'. */
7535 switch ((integer_onep (high_result) * 4)
7536 + (integer_onep (equal_result) * 2)
7537 + integer_onep (low_result))
7538 {
7539 case 0:
7540 /* Always false. */
7541 return omit_one_operand (type, integer_zero_node, arg0);
7542 case 1:
7543 code = LT_EXPR;
7544 break;
7545 case 2:
7546 code = EQ_EXPR;
7547 break;
7548 case 3:
7549 code = LE_EXPR;
7550 break;
7551 case 4:
7552 code = GT_EXPR;
7553 break;
7554 case 5:
7555 code = NE_EXPR;
7556 break;
7557 case 6:
7558 code = GE_EXPR;
7559 break;
7560 case 7:
7561 /* Always true. */
7562 return omit_one_operand (type, integer_one_node, arg0);
7563 }
7564
7565 t = build (code, type, cval1, cval2);
7566 if (save_p)
7567 return save_expr (t);
7568 else
7569 return fold (t);
7570 }
7571 }
7572 }
7573
7574 /* If this is a comparison of a field, we may be able to simplify it. */
7575 if (((TREE_CODE (arg0) == COMPONENT_REF
7576 && (*lang_hooks.can_use_bit_fields_p) ())
7577 || TREE_CODE (arg0) == BIT_FIELD_REF)
7578 && (code == EQ_EXPR || code == NE_EXPR)
7579 /* Handle the constant case even without -O
7580 to make sure the warnings are given. */
7581 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
7582 {
7583 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
7584 return t1 ? t1 : t;
7585 }
7586
7587 /* If this is a comparison of complex values and either or both sides
7588 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
7589 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
7590 This may prevent needless evaluations. */
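/* For example (illustrative): z1 == z2 splits into
real (z1) == real (z2) && imag (z1) == imag (z2).  */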
7591 if ((code == EQ_EXPR || code == NE_EXPR)
7592 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
7593 && (TREE_CODE (arg0) == COMPLEX_EXPR
7594 || TREE_CODE (arg1) == COMPLEX_EXPR
7595 || TREE_CODE (arg0) == COMPLEX_CST
7596 || TREE_CODE (arg1) == COMPLEX_CST))
7597 {
7598 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
7599 tree real0, imag0, real1, imag1;
7600
7601 arg0 = save_expr (arg0);
7602 arg1 = save_expr (arg1);
7603 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
7604 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
7605 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
7606 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
7607
7608 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
7609 : TRUTH_ORIF_EXPR),
7610 type,
7611 fold (build (code, type, real0, real1)),
7612 fold (build (code, type, imag0, imag1))));
7613 }
7614
7615 /* Optimize comparisons of strlen vs zero to a compare of the
7616 first character of the string vs zero. To wit,
7617 strlen(ptr) == 0 => *ptr == 0
7618 strlen(ptr) != 0 => *ptr != 0
7619 Other cases should reduce to one of these two (or a constant)
7620 due to the return value of strlen being unsigned. */
7621 if ((code == EQ_EXPR || code == NE_EXPR)
7622 && integer_zerop (arg1)
7623 && TREE_CODE (arg0) == CALL_EXPR)
7624 {
7625 tree fndecl = get_callee_fndecl (arg0);
7626 tree arglist;
7627
7628 if (fndecl
7629 && DECL_BUILT_IN (fndecl)
7630 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
7631 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
7632 && (arglist = TREE_OPERAND (arg0, 1))
7633 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
7634 && ! TREE_CHAIN (arglist))
7635 return fold (build (code, type,
7636 build1 (INDIRECT_REF, char_type_node,
7637 TREE_VALUE (arglist)),
7638 integer_zero_node));
7639 }
7640
7641 /* From here on, the only cases we handle are when the result is
7642 known to be a constant.
7643
7644 To compute GT, swap the arguments and do LT.
7645 To compute GE, do LT and invert the result.
7646 To compute LE, swap the arguments, do LT and invert the result.
7647 To compute NE, do EQ and invert the result.
7648
7649 Therefore, the code below must handle only EQ and LT. */
7650
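/* For example (illustrative): 2 >= 3 is computed as 2 < 3,
giving 1, which is then inverted to 0.  */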
7651 if (code == LE_EXPR || code == GT_EXPR)
7652 {
7653 tem = arg0, arg0 = arg1, arg1 = tem;
7654 code = swap_tree_comparison (code);
7655 }
7656
7657 /* Note that it is safe to invert for real values here because we
7658 will check below in the one case that it matters. */
7659
7660 t1 = NULL_TREE;
7661 invert = 0;
7662 if (code == NE_EXPR || code == GE_EXPR)
7663 {
7664 invert = 1;
7665 code = invert_tree_comparison (code);
7666 }
7667
7668 /* Compute a result for LT or EQ if args permit;
7669 otherwise return T. */
7670 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
7671 {
7672 if (code == EQ_EXPR)
7673 t1 = build_int_2 (tree_int_cst_equal (arg0, arg1), 0);
7674 else
7675 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
7676 ? INT_CST_LT_UNSIGNED (arg0, arg1)
7677 : INT_CST_LT (arg0, arg1)),
7678 0);
7679 }
7680
7681 #if 0 /* This is no longer useful, but breaks some real code. */
7682 /* Assume a nonexplicit constant cannot equal an explicit one,
7683 since such code would be undefined anyway.
7684 Exception: on sysvr4, using #pragma weak,
7685 a label can come out as 0. */
7686 else if (TREE_CODE (arg1) == INTEGER_CST
7687 && !integer_zerop (arg1)
7688 && TREE_CONSTANT (arg0)
7689 && TREE_CODE (arg0) == ADDR_EXPR
7690 && code == EQ_EXPR)
7691 t1 = build_int_2 (0, 0);
7692 #endif
7693 /* Two real constants can be compared explicitly. */
7694 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
7695 {
7696 /* If either operand is a NaN, the result is false with two
7697 exceptions: First, an NE_EXPR is true on NaNs, but that case
7698 is already handled correctly since we will be inverting the
7699 result for NE_EXPR. Second, if we had inverted a LE_EXPR
7700 or a GE_EXPR into a LT_EXPR, we must return true so that it
7701 will be inverted into false. */
7702
7703 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
7704 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
7705 t1 = build_int_2 (invert && code == LT_EXPR, 0);
7706
7707 else if (code == EQ_EXPR)
7708 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
7709 TREE_REAL_CST (arg1)),
7710 0);
7711 else
7712 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
7713 TREE_REAL_CST (arg1)),
7714 0);
7715 }
7716
7717 if (t1 == NULL_TREE)
7718 return t;
7719
7720 if (invert)
7721 TREE_INT_CST_LOW (t1) ^= 1;
7722
7723 TREE_TYPE (t1) = type;
7724 if (TREE_CODE (type) == BOOLEAN_TYPE)
7725 return (*lang_hooks.truthvalue_conversion) (t1);
7726 return t1;
7727
7728 case COND_EXPR:
7729 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
7730 so all simple results must be passed through pedantic_non_lvalue. */
7731 if (TREE_CODE (arg0) == INTEGER_CST)
7732 return pedantic_non_lvalue
7733 (TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
7734 else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
7735 return pedantic_omit_one_operand (type, arg1, arg0);
7736
7737 /* If the second operand is zero, invert the comparison and swap
7738 the second and third operands. Likewise if the second operand
7739 is constant and the third is not or if the third operand is
7740 equivalent to the first operand of the comparison. */
7741
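/* For example (illustrative): with integer operands,
x > y ? 0 : z becomes x <= y ? z : 0.  */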
7742 if (integer_zerop (arg1)
7743 || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
7744 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
7745 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
7746 TREE_OPERAND (t, 2),
7747 TREE_OPERAND (arg0, 1))))
7748 {
7749 /* See if this can be inverted. If it can't, possibly because
7750 it was a floating-point inequality comparison, don't do
7751 anything. */
7752 tem = invert_truthvalue (arg0);
7753
7754 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
7755 {
7756 t = build (code, type, tem,
7757 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
7758 arg0 = tem;
7759 /* arg1 should be the first argument of the new T. */
7760 arg1 = TREE_OPERAND (t, 1);
7761 STRIP_NOPS (arg1);
7762 }
7763 }
7764
7765 /* If we have A op B ? A : C, we may be able to convert this to a
7766 simpler expression, depending on the operation and the values
7767 of B and C. Signed zeros prevent all of these transformations,
7768 for reasons given above each one. */
7769
7770 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
7771 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
7772 arg1, TREE_OPERAND (arg0, 1))
7773 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
7774 {
7775 tree arg2 = TREE_OPERAND (t, 2);
7776 enum tree_code comp_code = TREE_CODE (arg0);
7777
7778 STRIP_NOPS (arg2);
7779
7780 /* If we have A op 0 ? A : -A, consider applying the following
7781 transformations:
7782
7783 A == 0? A : -A same as -A
7784 A != 0? A : -A same as A
7785 A >= 0? A : -A same as abs (A)
7786 A > 0? A : -A same as abs (A)
7787 A <= 0? A : -A same as -abs (A)
7788 A < 0? A : -A same as -abs (A)
7789
7790 None of these transformations work for modes with signed
7791 zeros. If A is +/-0, the first two transformations will
7792 change the sign of the result (from +0 to -0, or vice
7793 versa). The last four will fix the sign of the result,
7794 even though the original expressions could be positive or
7795 negative, depending on the sign of A.
7796
7797 Note that all these transformations are correct if A is
7798 NaN, since the two alternatives (A and -A) are also NaNs. */
7799 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
7800 ? real_zerop (TREE_OPERAND (arg0, 1))
7801 : integer_zerop (TREE_OPERAND (arg0, 1)))
7802 && TREE_CODE (arg2) == NEGATE_EXPR
7803 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
7804 switch (comp_code)
7805 {
7806 case EQ_EXPR:
7807 return
7808 pedantic_non_lvalue
7809 (convert (type,
7810 negate_expr
7811 (convert (TREE_TYPE (TREE_OPERAND (t, 1)),
7812 arg1))));
7813 case NE_EXPR:
7814 return pedantic_non_lvalue (convert (type, arg1));
7815 case GE_EXPR:
7816 case GT_EXPR:
7817 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7818 arg1 = convert ((*lang_hooks.types.signed_type)
7819 (TREE_TYPE (arg1)), arg1);
7820 return pedantic_non_lvalue
7821 (convert (type, fold (build1 (ABS_EXPR,
7822 TREE_TYPE (arg1), arg1))));
7823 case LE_EXPR:
7824 case LT_EXPR:
7825 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7826 arg1 = convert ((*lang_hooks.types.signed_type)
7827 (TREE_TYPE (arg1)), arg1);
7828 return pedantic_non_lvalue
7829 (negate_expr (convert (type,
7830 fold (build1 (ABS_EXPR,
7831 TREE_TYPE (arg1),
7832 arg1)))));
7833 default:
7834 abort ();
7835 }
7836
7837 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
7838 A == 0 ? A : 0 is always 0 unless A is -0. Note that
7839 both transformations are correct when A is NaN: A != 0
7840 is then true, and A == 0 is false. */
7841
7842 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
7843 {
7844 if (comp_code == NE_EXPR)
7845 return pedantic_non_lvalue (convert (type, arg1));
7846 else if (comp_code == EQ_EXPR)
7847 return pedantic_non_lvalue (convert (type, integer_zero_node));
7848 }
7849
7850 /* Try some transformations of A op B ? A : B.
7851
7852 A == B? A : B same as B
7853 A != B? A : B same as A
7854 A >= B? A : B same as max (A, B)
7855 A > B? A : B same as max (B, A)
7856 A <= B? A : B same as min (A, B)
7857 A < B? A : B same as min (B, A)
7858
7859 As above, these transformations don't work in the presence
7860 of signed zeros. For example, if A and B are zeros of
7861 opposite sign, the first two transformations will change
7862 the sign of the result. In the last four, the original
7863 expressions give different results for (A=+0, B=-0) and
7864 (A=-0, B=+0), but the transformed expressions do not.
7865
7866 The first two transformations are correct if either A or B
7867 is a NaN. In the first transformation, the condition will
7868 be false, and B will indeed be chosen. In the case of the
7869 second transformation, the condition A != B will be true,
7870 and A will be chosen.
7871
7872 The conversions to max() and min() are not correct if B is
7873 a number and A is not. The conditions in the original
7874 expressions will be false, so all four give B. The min()
7875 and max() versions would give a NaN instead. */
7876 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
7877 arg2, TREE_OPERAND (arg0, 0)))
7878 {
7879 tree comp_op0 = TREE_OPERAND (arg0, 0);
7880 tree comp_op1 = TREE_OPERAND (arg0, 1);
7881 tree comp_type = TREE_TYPE (comp_op0);
7882
7883 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
7884 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
7885 {
7886 comp_type = type;
7887 comp_op0 = arg1;
7888 comp_op1 = arg2;
7889 }
7890
7891 switch (comp_code)
7892 {
7893 case EQ_EXPR:
7894 return pedantic_non_lvalue (convert (type, arg2));
7895 case NE_EXPR:
7896 return pedantic_non_lvalue (convert (type, arg1));
7897 case LE_EXPR:
7898 case LT_EXPR:
7899 /* In C++ a ?: expression can be an lvalue, so put the
7900 operand which will be used if they are equal first
7901 so that we can convert this back to the
7902 corresponding COND_EXPR. */
7903 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
7904 return pedantic_non_lvalue
7905 (convert (type, fold (build (MIN_EXPR, comp_type,
7906 (comp_code == LE_EXPR
7907 ? comp_op0 : comp_op1),
7908 (comp_code == LE_EXPR
7909 ? comp_op1 : comp_op0)))));
7910 break;
7911 case GE_EXPR:
7912 case GT_EXPR:
7913 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
7914 return pedantic_non_lvalue
7915 (convert (type, fold (build (MAX_EXPR, comp_type,
7916 (comp_code == GE_EXPR
7917 ? comp_op0 : comp_op1),
7918 (comp_code == GE_EXPR
7919 ? comp_op1 : comp_op0)))));
7920 break;
7921 default:
7922 abort ();
7923 }
7924 }
7925
7926 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
7927 we might still be able to simplify this. For example,
7928 if C1 is one less or one more than C2, this might have started
7929 out as a MIN or MAX and been transformed by this function.
7930 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
7931
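/* For example (illustrative): x < 3 ? x : 2 is recognized as
min (x, 2), since here C1 == C2 + 1.  */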
7932 if (INTEGRAL_TYPE_P (type)
7933 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7934 && TREE_CODE (arg2) == INTEGER_CST)
7935 switch (comp_code)
7936 {
7937 case EQ_EXPR:
7938 /* We can replace A with C1 in this case. */
7939 arg1 = convert (type, TREE_OPERAND (arg0, 1));
7940 t = build (code, type, TREE_OPERAND (t, 0), arg1,
7941 TREE_OPERAND (t, 2));
7942 break;
7943
7944 case LT_EXPR:
7945 /* If C1 is C2 + 1, this is min(A, C2). */
7946 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
7947 && operand_equal_p (TREE_OPERAND (arg0, 1),
7948 const_binop (PLUS_EXPR, arg2,
7949 integer_one_node, 0), 1))
7950 return pedantic_non_lvalue
7951 (fold (build (MIN_EXPR, type, arg1, arg2)));
7952 break;
7953
7954 case LE_EXPR:
7955 /* If C1 is C2 - 1, this is min(A, C2). */
7956 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
7957 && operand_equal_p (TREE_OPERAND (arg0, 1),
7958 const_binop (MINUS_EXPR, arg2,
7959 integer_one_node, 0), 1))
7960 return pedantic_non_lvalue
7961 (fold (build (MIN_EXPR, type, arg1, arg2)));
7962 break;
7963
7964 case GT_EXPR:
7965 /* If C1 is C2 - 1, this is max(A, C2). */
7966 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
7967 && operand_equal_p (TREE_OPERAND (arg0, 1),
7968 const_binop (MINUS_EXPR, arg2,
7969 integer_one_node, 0), 1))
7970 return pedantic_non_lvalue
7971 (fold (build (MAX_EXPR, type, arg1, arg2)));
7972 break;
7973
7974 case GE_EXPR:
7975 /* If C1 is C2 + 1, this is max(A, C2). */
7976 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
7977 && operand_equal_p (TREE_OPERAND (arg0, 1),
7978 const_binop (PLUS_EXPR, arg2,
7979 integer_one_node, 0), 1))
7980 return pedantic_non_lvalue
7981 (fold (build (MAX_EXPR, type, arg1, arg2)));
7982 break;
7983 case NE_EXPR:
7984 break;
7985 default:
7986 abort ();
7987 }
7988 }
7989
7990 /* If the second operand is simpler than the third, swap them
7991 since that produces better jump optimization results. */
7992 if ((TREE_CONSTANT (arg1) || DECL_P (arg1)
7993 || TREE_CODE (arg1) == SAVE_EXPR)
7994 && ! (TREE_CONSTANT (TREE_OPERAND (t, 2))
7995 || DECL_P (TREE_OPERAND (t, 2))
7996 || TREE_CODE (TREE_OPERAND (t, 2)) == SAVE_EXPR))
7997 {
7998 /* See if this can be inverted. If it can't, possibly because
7999 it was a floating-point inequality comparison, don't do
8000 anything. */
8001 tem = invert_truthvalue (arg0);
8002
8003 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8004 {
8005 t = build (code, type, tem,
8006 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
8007 arg0 = tem;
8008 /* arg1 should be the first argument of the new T. */
8009 arg1 = TREE_OPERAND (t, 1);
8010 STRIP_NOPS (arg1);
8011 }
8012 }
8013
8014 /* Convert A ? 1 : 0 to simply A. */
8015 if (integer_onep (TREE_OPERAND (t, 1))
8016 && integer_zerop (TREE_OPERAND (t, 2))
8017 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
8018 call to fold will try to move the conversion inside
8019 a COND, which will recurse. In that case, the COND_EXPR
8020 is probably the best choice, so leave it alone. */
8021 && type == TREE_TYPE (arg0))
8022 return pedantic_non_lvalue (arg0);
8023
8024 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
8025 over COND_EXPR in cases such as floating point comparisons. */
8026 if (integer_zerop (TREE_OPERAND (t, 1))
8027 && integer_onep (TREE_OPERAND (t, 2))
8028 && truth_value_p (TREE_CODE (arg0)))
8029 return pedantic_non_lvalue (convert (type,
8030 invert_truthvalue (arg0)));
8031
8032 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
8033 operation is simply A & 2. */
8034
8035 if (integer_zerop (TREE_OPERAND (t, 2))
8036 && TREE_CODE (arg0) == NE_EXPR
8037 && integer_zerop (TREE_OPERAND (arg0, 1))
8038 && integer_pow2p (arg1)
8039 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
8040 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
8041 arg1, 1))
8042 return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));
8043
8044 /* Convert A ? B : 0 into A && B if A and B are truth values. */
8045 if (integer_zerop (TREE_OPERAND (t, 2))
8046 && truth_value_p (TREE_CODE (arg0))
8047 && truth_value_p (TREE_CODE (arg1)))
8048 return pedantic_non_lvalue (fold (build (TRUTH_ANDIF_EXPR, type,
8049 arg0, arg1)));
8050
8051 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
8052 if (integer_onep (TREE_OPERAND (t, 2))
8053 && truth_value_p (TREE_CODE (arg0))
8054 && truth_value_p (TREE_CODE (arg1)))
8055 {
8056 /* Only perform transformation if ARG0 is easily inverted. */
8057 tem = invert_truthvalue (arg0);
8058 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8059 return pedantic_non_lvalue (fold (build (TRUTH_ORIF_EXPR, type,
8060 tem, arg1)));
8061 }
8062
8063 return t;
8064
8065 case COMPOUND_EXPR:
8066 /* When pedantic, a compound expression can be neither an lvalue
8067 nor an integer constant expression. */
8068 if (TREE_SIDE_EFFECTS (arg0) || pedantic)
8069 return t;
8070 /* Don't let (0, 0) be a null pointer constant. */
8071 if (integer_zerop (arg1))
8072 return build1 (NOP_EXPR, type, arg1);
8073 return convert (type, arg1);
8074
8075 case COMPLEX_EXPR:
8076 if (wins)
8077 return build_complex (type, arg0, arg1);
8078 return t;
8079
8080 case REALPART_EXPR:
8081 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8082 return t;
8083 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8084 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8085 TREE_OPERAND (arg0, 1));
8086 else if (TREE_CODE (arg0) == COMPLEX_CST)
8087 return TREE_REALPART (arg0);
8088 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8089 return fold (build (TREE_CODE (arg0), type,
8090 fold (build1 (REALPART_EXPR, type,
8091 TREE_OPERAND (arg0, 0))),
8092 fold (build1 (REALPART_EXPR,
8093 type, TREE_OPERAND (arg0, 1)))));
8094 return t;
8095
8096 case IMAGPART_EXPR:
8097 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8098 return convert (type, integer_zero_node);
8099 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8100 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8101 TREE_OPERAND (arg0, 0));
8102 else if (TREE_CODE (arg0) == COMPLEX_CST)
8103 return TREE_IMAGPART (arg0);
8104 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8105 return fold (build (TREE_CODE (arg0), type,
8106 fold (build1 (IMAGPART_EXPR, type,
8107 TREE_OPERAND (arg0, 0))),
8108 fold (build1 (IMAGPART_EXPR, type,
8109 TREE_OPERAND (arg0, 1)))));
8110 return t;
8111
8112 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
8113 appropriate. */
8114 case CLEANUP_POINT_EXPR:
8115 if (! has_cleanups (arg0))
8116 return TREE_OPERAND (t, 0);
8117
8118 {
8119 enum tree_code code0 = TREE_CODE (arg0);
8120 int kind0 = TREE_CODE_CLASS (code0);
8121 tree arg00 = TREE_OPERAND (arg0, 0);
8122 tree arg01;
8123
8124 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
8125 return fold (build1 (code0, type,
8126 fold (build1 (CLEANUP_POINT_EXPR,
8127 TREE_TYPE (arg00), arg00))));
8128
8129 if (kind0 == '<' || kind0 == '2'
8130 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
8131 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
8132 || code0 == TRUTH_XOR_EXPR)
8133 {
8134 arg01 = TREE_OPERAND (arg0, 1);
8135
8136 if (TREE_CONSTANT (arg00)
8137 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
8138 && ! has_cleanups (arg00)))
8139 return fold (build (code0, type, arg00,
8140 fold (build1 (CLEANUP_POINT_EXPR,
8141 TREE_TYPE (arg01), arg01))));
8142
8143 if (TREE_CONSTANT (arg01))
8144 return fold (build (code0, type,
8145 fold (build1 (CLEANUP_POINT_EXPR,
8146 TREE_TYPE (arg00), arg00)),
8147 arg01));
8148 }
8149
8150 return t;
8151 }
8152
8153 case CALL_EXPR:
8154 /* Check for a built-in function. */
8155 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
8156 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (expr, 0), 0))
8157 == FUNCTION_DECL)
8158 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
8159 {
8160 tree tmp = fold_builtin (expr);
8161 if (tmp)
8162 return tmp;
8163 }
8164 return t;
8165
8166 default:
8167 return t;
8168 } /* switch (code) */
8169 }
8170
8171 #ifdef ENABLE_FOLD_CHECKING
8172 #undef fold
8173
8174 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
8175 static void fold_check_failed (tree, tree);
8176 void print_fold_checksum (tree);
8177
8178 /* When --enable-checking=fold, compute a digest of expr before
8179 and after the actual fold call to verify that fold did not
8180 accidentally change the original expr. */
8181
8182 tree
8183 fold (tree expr)
8184 {
8185 tree ret;
8186 struct md5_ctx ctx;
8187 unsigned char checksum_before[16], checksum_after[16];
8188 htab_t ht;
8189
8190 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8191 md5_init_ctx (&ctx);
8192 fold_checksum_tree (expr, &ctx, ht);
8193 md5_finish_ctx (&ctx, checksum_before);
8194 htab_empty (ht);
8195
8196 ret = fold_1 (expr);
8197
8198 md5_init_ctx (&ctx);
8199 fold_checksum_tree (expr, &ctx, ht);
8200 md5_finish_ctx (&ctx, checksum_after);
8201 htab_delete (ht);
8202
8203 if (memcmp (checksum_before, checksum_after, 16))
8204 fold_check_failed (expr, ret);
8205
8206 return ret;
8207 }
8208
8209 void
8210 print_fold_checksum (tree expr)
8211 {
8212 struct md5_ctx ctx;
8213 unsigned char checksum[16], cnt;
8214 htab_t ht;
8215
8216 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8217 md5_init_ctx (&ctx);
8218 fold_checksum_tree (expr, &ctx, ht);
8219 md5_finish_ctx (&ctx, checksum);
8220 htab_delete (ht);
8221 for (cnt = 0; cnt < 16; ++cnt)
8222 fprintf (stderr, "%02x", checksum[cnt]);
8223 putc ('\n', stderr);
8224 }
8225
8226 static void
8227 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
8228 {
8229 internal_error ("fold check: original tree changed by fold");
8230 }
8231
8232 static void
8233 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
8234 {
8235 void **slot;
8236 enum tree_code code;
8237 char buf[sizeof (struct tree_decl)];
8238 int i, len;
8239
8240 if (sizeof (struct tree_exp) + 5 * sizeof (tree)
8241 > sizeof (struct tree_decl)
8242 || sizeof (struct tree_type) > sizeof (struct tree_decl))
8243 abort ();
8244 if (expr == NULL)
8245 return;
8246 slot = htab_find_slot (ht, expr, INSERT);
8247 if (*slot != NULL)
8248 return;
8249 *slot = expr;
8250 code = TREE_CODE (expr);
8251 if (code == SAVE_EXPR && SAVE_EXPR_NOPLACEHOLDER (expr))
8252 {
8253 /* Allow SAVE_EXPR_NOPLACEHOLDER flag to be modified. */
8254 memcpy (buf, expr, tree_size (expr));
8255 expr = (tree) buf;
8256 SAVE_EXPR_NOPLACEHOLDER (expr) = 0;
8257 }
8258 else if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
8259 {
8260 /* Allow DECL_ASSEMBLER_NAME to be modified. */
8261 memcpy (buf, expr, tree_size (expr));
8262 expr = (tree) buf;
8263 SET_DECL_ASSEMBLER_NAME (expr, NULL);
8264 }
8265 else if (TREE_CODE_CLASS (code) == 't'
8266 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
8267 {
8268 /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified. */
8269 memcpy (buf, expr, tree_size (expr));
8270 expr = (tree) buf;
8271 TYPE_POINTER_TO (expr) = NULL;
8272 TYPE_REFERENCE_TO (expr) = NULL;
8273 }
8274 md5_process_bytes (expr, tree_size (expr), ctx);
8275 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
8276 if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
8277 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
8278 len = TREE_CODE_LENGTH (code);
8279 switch (TREE_CODE_CLASS (code))
8280 {
8281 case 'c':
8282 switch (code)
8283 {
8284 case STRING_CST:
8285 md5_process_bytes (TREE_STRING_POINTER (expr),
8286 TREE_STRING_LENGTH (expr), ctx);
8287 break;
8288 case COMPLEX_CST:
8289 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
8290 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
8291 break;
8292 case VECTOR_CST:
8293 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
8294 break;
8295 default:
8296 break;
8297 }
8298 break;
8299 case 'x':
8300 switch (code)
8301 {
8302 case TREE_LIST:
8303 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
8304 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
8305 break;
8306 case TREE_VEC:
8307 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
8308 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
8309 break;
8310 default:
8311 break;
8312 }
8313 break;
8314 case 'e':
8315 switch (code)
8316 {
8317 case SAVE_EXPR: len = 2; break;
8318 case GOTO_SUBROUTINE_EXPR: len = 0; break;
8319 case RTL_EXPR: len = 0; break;
8320 case WITH_CLEANUP_EXPR: len = 2; break;
8321 default: break;
8322 }
8323 /* FALLTHROUGH */
8324 case 'r':
8325 case '<':
8326 case '1':
8327 case '2':
8328 case 's':
8329 for (i = 0; i < len; ++i)
8330 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
8331 break;
8332 case 'd':
8333 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
8334 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
8335 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
8336 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
8337 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
8338 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
8339 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
8340 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
8341 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
8342 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
8343 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
8344 break;
8345 case 't':
8346 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
8347 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
8348 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
8349 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
8350 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
8351 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
8352 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
8353 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
8354 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
8355 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
8356 break;
8357 default:
8358 break;
8359 }
8360 }
8361
8362 #endif
8363
8364 /* Perform constant folding and related simplification of initializer
8365 expression EXPR. This behaves identically to "fold" but ignores
8366 potential run-time traps and exceptions that fold must preserve. */
8367
8368 tree
8369 fold_initializer (tree expr)
8370 {
8371 int saved_signaling_nans = flag_signaling_nans;
8372 int saved_trapping_math = flag_trapping_math;
8373 int saved_trapv = flag_trapv;
8374 tree result;
8375
8376 flag_signaling_nans = 0;
8377 flag_trapping_math = 0;
8378 flag_trapv = 0;
8379
8380 result = fold (expr);
8381
8382 flag_signaling_nans = saved_signaling_nans;
8383 flag_trapping_math = saved_trapping_math;
8384 flag_trapv = saved_trapv;
8385
8386 return result;
8387 }
8388
8389 /* Determine if first argument is a multiple of second argument. Return 0 if
8390 it is not, or we cannot easily determine it to be.
8391
8392 An example of the sort of thing we care about (at this point; this routine
8393 could surely be made more general, and expanded to do what the *_DIV_EXPR's
8394 fold cases do now) is discovering that
8395
8396 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8397
8398 is a multiple of
8399
8400 SAVE_EXPR (J * 8)
8401
8402 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
8403
8404 This code also handles discovering that
8405
8406 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8407
8408 is a multiple of 8 so we don't have to worry about dealing with a
8409 possible remainder.
8410
8411 Note that we *look* inside a SAVE_EXPR only to determine how it was
8412 calculated; it is not safe for fold to do much of anything else with the
8413 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
8414 at run time. For example, the latter example above *cannot* be implemented
8415 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
8416 evaluation time of the original SAVE_EXPR is not necessarily the same at
8417 the time the new expression is evaluated. The only optimization of this
8418 sort that would be valid is changing
8419
8420 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
8421
8422 divided by 8 to
8423
8424 SAVE_EXPR (I) * SAVE_EXPR (J)
8425
8426 (where the same SAVE_EXPR (J) is used in the original and the
8427 transformed version). */
8428
static int
multiple_of_p (tree type, tree top, tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
	{
	  tree op1, t1;

	  op1 = TREE_OPERAND (top, 1);
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
	      > TREE_INT_CST_LOW (op1)
	      && TREE_INT_CST_HIGH (op1) == 0
	      && 0 != (t1 = convert (type,
				     const_binop (LSHIFT_EXPR, size_one_node,
						  op1, 0)))
	      && ! TREE_OVERFLOW (t1))
	    return multiple_of_p (type, t1, bottom);
	}
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
	  || (TYPE_PRECISION (type)
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
	return 0;

      /* ... fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
	  || (TREE_UNSIGNED (type)
	      && (tree_int_cst_sgn (top) < 0
		  || tree_int_cst_sgn (bottom) < 0)))
	return 0;
      return integer_zerop (const_binop (TRUNC_MOD_EXPR,
					 top, bottom, 0));

    default:
      return 0;
    }
}
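
/* A usage sketch: fold's handling of the *_DIV_EXPR codes applies this
   predicate to rewrite a truncating division whose result is known to
   be exact into the cheaper EXACT_DIV_EXPR, roughly

     if (multiple_of_p (type, arg0, arg1))
       return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));

   (an illustrative restatement of an existing caller, not new code).  */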

/* Return true if `t' is known to be non-negative.  */

int
tree_expr_nonnegative_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case ABS_EXPR:
    case FFS_EXPR:
    case POPCOUNT_EXPR:
    case PARITY_EXPR:
      return 1;

    case CLZ_EXPR:
    case CTZ_EXPR:
      /* These are undefined at zero.  This is true even if
	 C[LT]Z_DEFINED_VALUE_AT_ZERO is set, since what we're
	 computing here is a user-visible property.  */
      return 0;

    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
	return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	       && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
	 both unsigned and at least 2 bits shorter than the result.  */
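      /* For instance: adding two zero-extended 8-bit values in a 32-bit
	 int yields at most 255 + 255 = 510 < 2^9, so the signed sum can
	 never wrap negative; here prec = MAX (8, 8) + 1 = 9 < 32.  */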
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
	    {
	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
				       TYPE_PRECISION (inner2)) + 1;
	      return prec < TYPE_PRECISION (TREE_TYPE (t));
	    }
	}
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
	{
	  /* x * x for floating point x is always non-negative.  */
	  if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
	    return 1;
	  return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
		 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
	}

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and the sum of their precisions is less than that
	 of the result.  */
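      /* For instance: multiplying two zero-extended 8-bit values in a
	 32-bit int yields at most 255 * 255 = 65025 < 2^16, so
	 8 + 8 = 16 < 32 bits suffice and the signed product can never
	 wrap negative.  */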
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
	    return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
		   < TYPE_PRECISION (TREE_TYPE (t));
	}
      return 0;

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case RDIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
	tree outer_type = TREE_TYPE (t);

	if (TREE_CODE (outer_type) == REAL_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      {
		if (TREE_UNSIGNED (inner_type))
		  return 1;
		return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	      }
	  }
	else if (TREE_CODE (outer_type) == INTEGER_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
		     && TREE_UNSIGNED (inner_type);
	  }
      }
      break;

    case COND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
    case COMPOUND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MIN_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MAX_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MODIFY_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case BIND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case SAVE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case NON_LVALUE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case FLOAT_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case RTL_EXPR:
      return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));

    case CALL_EXPR:
      {
	tree fndecl = get_callee_fndecl (t);
	tree arglist = TREE_OPERAND (t, 1);
	if (fndecl
	    && DECL_BUILT_IN (fndecl)
	    && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    case BUILT_IN_CABS:
	    case BUILT_IN_CABSL:
	    case BUILT_IN_CABSF:
	    case BUILT_IN_EXP:
	    case BUILT_IN_EXPF:
	    case BUILT_IN_EXPL:
	    case BUILT_IN_FABS:
	    case BUILT_IN_FABSF:
	    case BUILT_IN_FABSL:
	    case BUILT_IN_SQRT:
	    case BUILT_IN_SQRTF:
	    case BUILT_IN_SQRTL:
	      return 1;

	    case BUILT_IN_ATAN:
	    case BUILT_IN_ATANF:
	    case BUILT_IN_ATANL:
	    case BUILT_IN_CEIL:
	    case BUILT_IN_CEILF:
	    case BUILT_IN_CEILL:
	    case BUILT_IN_FLOOR:
	    case BUILT_IN_FLOORF:
	    case BUILT_IN_FLOORL:
	    case BUILT_IN_NEARBYINT:
	    case BUILT_IN_NEARBYINTF:
	    case BUILT_IN_NEARBYINTL:
	    case BUILT_IN_ROUND:
	    case BUILT_IN_ROUNDF:
	    case BUILT_IN_ROUNDL:
	    case BUILT_IN_TRUNC:
	    case BUILT_IN_TRUNCF:
	    case BUILT_IN_TRUNCL:
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist));

	    case BUILT_IN_POW:
	    case BUILT_IN_POWF:
	    case BUILT_IN_POWL:
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist));

	    default:
	      break;
	    }
      }

      /* ... fall through ...  */

    default:
      if (truth_value_p (TREE_CODE (t)))
	/* Truth values evaluate to 0 or 1, which is nonnegative.  */
	return 1;
    }

  /* We don't know the sign of `t', so be conservative and return false.  */
  return 0;
}
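
/* A usage sketch: fold's ABS_EXPR case applies this predicate to drop
   a redundant absolute value, roughly

     if (tree_expr_nonnegative_p (arg0))
       return arg0;

   (an illustrative restatement of an existing caller in this file).  */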

/* Return true if `r' is known to be non-negative.
   Only handles constants at the moment.  */

int
rtl_expr_nonnegative_p (rtx r)
{
  switch (GET_CODE (r))
    {
    case CONST_INT:
      return INTVAL (r) >= 0;

    case CONST_DOUBLE:
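      /* A VOIDmode CONST_DOUBLE holds a double-word integer; its sign
	 is the sign of the high word.  For floating-point modes we make
	 no claim here.  */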
      if (GET_MODE (r) == VOIDmode)
	return CONST_DOUBLE_HIGH (r) >= 0;
      return 0;

    case CONST_VECTOR:
      {
	int units, i;
	rtx elt;

	units = CONST_VECTOR_NUNITS (r);

	for (i = 0; i < units; ++i)
	  {
	    elt = CONST_VECTOR_ELT (r, i);
	    if (!rtl_expr_nonnegative_p (elt))
	      return 0;
	  }

	return 1;
      }

    case SYMBOL_REF:
    case LABEL_REF:
      /* These are always nonnegative.  */
      return 1;

    default:
      return 0;
    }
}

#include "gt-fold-const.h"