/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant and prior overflow indicator, and
   forces the value to fit the type.  It returns an overflow indicator.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"

static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree int_const_binop (enum tree_code, tree, tree, int);
static tree const_binop (enum tree_code, tree, tree, int);
static hashval_t size_htab_hash (const void *);
static int size_htab_eq (const void *, const void *);
static tree fold_convert (tree, tree);
static enum tree_code invert_tree_comparison (enum tree_code);
static enum tree_code swap_tree_comparison (enum tree_code);
static int comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (int);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static tree strip_compound_expr (tree, tree);
static int multiple_of_p (tree, tree, tree);
static tree constant_boolean_node (int, tree);
static int count_cond (tree, int);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
                                                 tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
#define COMPCODE_FALSE   0
#define COMPCODE_LT      1
#define COMPCODE_EQ      2
#define COMPCODE_LE      3
#define COMPCODE_GT      4
#define COMPCODE_NE      5
#define COMPCODE_GE      6
#define COMPCODE_TRUE    7
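
/* Note that for integer operands the encoding composes under the logical
   connectives: e.g. COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ), so the
   OR of two comparisons of the same operands is the bitwise OR of their
   compcodes, and AND likewise maps to bitwise AND.  */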

/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
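
/* A worked example, using 8-bit values for brevity: 0x70 + 0x70 wraps
   to 0xE0; the operands agree in sign but the sum does not, so
   ~(a ^ b) & (a ^ sum) == ~0x00 & 0x90 has its sign bit set and the
   macro yields nonzero.  By contrast 0x70 + 0x90 wraps to 0x00 with no
   signed overflow, and the macro yields zero.  */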
\f
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
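
/* As an illustration, with a 32-bit HOST_WIDE_INT (so BASE == 0x10000):
   LOWPART (0x12345678) == 0x5678 and HIGHPART (0x12345678) == 0x1234,
   and 0x5678 + 0x1234 * BASE reconstructs the original value.  */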

/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low, HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
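
/* A round trip through encode and decode is the identity.  Illustrative
   sketch:

     HOST_WIDE_INT w[4];
     unsigned HOST_WIDE_INT lo;
     HOST_WIDE_INT hi;

     encode (w, 0xbeef, 7);
     decode (w, &lo, &hi);    => lo == 0xbeef and hi == 7 again

   Only mul_double and div_and_round_double below work on the 4-word
   form directly.  */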
\f
/* Make the integer constant T valid for its type by setting to 0 or 1 all
   the bits in the constant that don't belong in the type.

   Return 1 if a signed overflow occurs, 0 otherwise.  If OVERFLOW is
   nonzero, a signed overflow has already occurred in calculating T, so
   propagate it.  */

int
force_fit_type (tree t, int overflow)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;

  if (TREE_CODE (t) == REAL_CST)
    {
      /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
         Consider doing it via real_convert now.  */
      return overflow;
    }

  else if (TREE_CODE (t) != INTEGER_CST)
    return overflow;

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t)))
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec == 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    TREE_INT_CST_HIGH (t)
      &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      TREE_INT_CST_HIGH (t) = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
    }

  /* Unsigned types do not suffer sign extension or overflow unless they
     are a sizetype.  */
  if (TREE_UNSIGNED (TREE_TYPE (t))
      && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
            && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
    return overflow;

  /* If the value's sign bit is set, extend the sign.  */
  if (prec != 2 * HOST_BITS_PER_WIDE_INT
      && (prec > HOST_BITS_PER_WIDE_INT
          ? 0 != (TREE_INT_CST_HIGH (t)
                  & ((HOST_WIDE_INT) 1
                     << (prec - HOST_BITS_PER_WIDE_INT - 1)))
          : 0 != (TREE_INT_CST_LOW (t)
                  & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
    {
      /* Value is negative:
         set to 1 all the bits that are outside this type's precision.  */
      if (prec > HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_HIGH (t)
          |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      else
        {
          TREE_INT_CST_HIGH (t) = -1;
          if (prec < HOST_BITS_PER_WIDE_INT)
            TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
        }
    }

  /* Return nonzero if signed overflow occurred.  */
  return
    ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
     != 0);
}
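
/* For example, forcing the value 255 (low word 0xFF, high word 0) into a
   signed 8-bit type leaves all eight bits in place, but the sign bit is
   set, so the value is sign-extended to -1; since the stored words
   changed, force_fit_type reports the overflow by returning nonzero.  */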
\f
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
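
/* The (l < l1) term is the carry out of the low word: the unsigned sum
   wrapped below its first operand.  For example, adding l1 == ~0, h1 == 0
   to l2 == 1, h2 == 0 gives l == 0 with h == 1, and no signed overflow
   is reported since all the high words involved are nonnegative.  */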

/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
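
/* The split follows two's complement negation by halves: when the low
   word is nonzero, its negation borrows from the high word, which is
   therefore just complemented; when the low word is zero, the carry
   propagates and the high word is negated outright.  Overflow happens
   only for the most negative value, whose high word equals its own
   negation with the sign bit set, which is what (*hv & h1) < 0 tests.  */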
\f
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset ((char *) prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);    /* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
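
/* The final test works because a correct signed doubleword product must
   have a top half that is a pure sign extension of the bottom half:
   all zero bits when *hv is nonnegative (so toplow | tophigh must be 0),
   all one bits when *hv is negative (so toplow & tophigh must be ~0).  */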
\f
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
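
/* Note the double-shift idiom (x >> (width - count - 1)) >> 1 above: it
   moves the high bits of L1 into *HV without ever shifting by a full
   word, which C leaves undefined and which would occur when COUNT is
   zero.  */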

/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT) prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
\f
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
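
/* Both rotates use the classic identity: a rotate by COUNT within PREC
   bits is the OR of a logical shift by COUNT and a logical shift the
   other way by PREC - COUNT, here carried out at doubleword width.  */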
\f
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.  It controls how the quotient is rounded to an
   integer.  Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo, HOST_WIDE_INT *hquo,
                      unsigned HOST_WIDE_INT *lrem, HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];    /* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {                           /* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {                           /* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset ((char *) quo, 0, sizeof quo);

  memset ((char *) num, 0, sizeof num);    /* to zero the extra scaling element */
  memset ((char *) den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {                       /* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0)
                den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the (I - 1)st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;        /* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num[num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If the result should be negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:        /* round toward zero */
    case EXACT_DIV_EXPR:        /* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:        /* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:         /* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:        /* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden))  */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      abort ();
    }

  /* compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
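
/* As an illustration of the rounding modes, consider -7 / 2: the trial
   quotient is -3 with remainder -1.  TRUNC_DIV_EXPR keeps -3,
   FLOOR_DIV_EXPR adjusts to -4 (negative ratio, nonzero remainder),
   CEIL_DIV_EXPR keeps -3, and ROUND_DIV_EXPR also keeps -3 because
   2 * |rem| does not exceed |den|.  */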
\f
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TREE_UNSIGNED (type))
        return false;

      /* Check that -CST will not overflow type.  */
      prec = TYPE_PRECISION (type);
      if (prec > HOST_BITS_PER_WIDE_INT)
        {
          if (TREE_INT_CST_LOW (t) != 0)
            return true;
          prec -= HOST_BITS_PER_WIDE_INT;
          val = TREE_INT_CST_HIGH (t);
        }
      else
        val = TREE_INT_CST_LOW (t);
      if (prec < HOST_BITS_PER_WIDE_INT)
        val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
      return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));

    case REAL_CST:
    case NEGATE_EXPR:
    case MINUS_EXPR:
      return true;

    default:
      break;
    }
  return false;
}
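
/* The INTEGER_CST case rejects exactly the minimum value of a signed
   type: in 8 bits, for instance, negating -128 yields -128 again modulo
   2^8, so that one constant cannot be negated without overflow.  */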

/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
    case REAL_CST:
      if (! TREE_UNSIGNED (type)
          && 0 != (tem = fold (build1 (NEGATE_EXPR, type, t)))
          && ! TREE_OVERFLOW (tem))
        return tem;
      break;

    case NEGATE_EXPR:
      return convert (type, TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        return convert (type,
                        fold (build (MINUS_EXPR, TREE_TYPE (t),
                                     TREE_OPERAND (t, 1),
                                     TREE_OPERAND (t, 0))));
      break;

    default:
      break;
    }

  return convert (type, fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t)));
}
\f
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
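
/* For example, splitting IN = a - 5 with CODE == PLUS_EXPR finds the
   literal 5 on the subtracted side, so it comes back in *MINUS_LITP,
   *CONP stays null, and the variable part a is returned.  */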

/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build (MINUS_EXPR, type, convert (type, t2),
                          convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build (MINUS_EXPR, type, convert (type, t1),
                          convert (type, TREE_OPERAND (t2, 0)));
        }
      return build (code, type, convert (type, t1), convert (type, t2));
    }

  return fold (build (code, type, convert (type, t1), convert (type, t2)));
}
\f
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TREE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;
  int no_overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case BIT_ANDTC_EXPR:
      low = int1l & ~int2l, hi = int1h & ~int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      abort ();
    }

  /* If this is for a sizetype, the result can be represented as a single
     (signed) HOST_WIDE_INT word, and no overflow has occurred, use size_int
     since it caches constants.  */
  if (is_sizetype
      && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
          || (hi == -1 && (HOST_WIDE_INT) low < 0))
      && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
    return size_int_type_wide (low, type);
  else
    {
      t = build_int_2 (low, hi);
      TREE_TYPE (t) = TREE_TYPE (arg1);
    }

  TREE_OVERFLOW (t)
    = ((notrunc
        ? (!uns || is_sizetype) && overflow
        : (force_fit_type (t, (!uns || is_sizetype) && overflow)
           && ! no_overflow))
       | TREE_OVERFLOW (arg1)
       | TREE_OVERFLOW (arg2));

  /* If we're doing a size calculation, unsigned arithmetic does overflow.
     So check if force_fit_type truncated the value.  */
  if (is_sizetype
      && ! TREE_OVERFLOW (t)
      && (TREE_INT_CST_HIGH (t) != hi
          || TREE_INT_CST_LOW (t) != low))
    TREE_OVERFLOW (t) = 1;

  TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
                                | TREE_CONSTANT_OVERFLOW (arg1)
                                | TREE_CONSTANT_OVERFLOW (arg2));
  return t;
}

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      tree t;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      REAL_ARITHMETIC (value, code, d1, d2);

      t = build_real (TREE_TYPE (arg1),
                      real_value_truncate (TYPE_MODE (TREE_TYPE (arg1)),
                                           value));

      TREE_OVERFLOW (t)
        = (force_fit_type (t, 0)
           | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t = build_complex (type,
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (PLUS_EXPR,
                                             const_binop (MULT_EXPR, r1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, i1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc),
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (MINUS_EXPR,
                                             const_binop (MULT_EXPR, i1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, r1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc));
          }
          break;

        default:
          abort ();
        }
      return t;
    }
  return 0;
}

/* These are the hash table functions for the hash table of INTEGER_CST
   nodes of a sizetype.  */

/* Return the hash code for X, an INTEGER_CST.  */

static hashval_t
size_htab_hash (const void *x)
{
  tree t = (tree) x;

  return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
          ^ htab_hash_pointer (TREE_TYPE (t))
          ^ (TREE_OVERFLOW (t) << 20));
}

/* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
   is the same as that given by *Y, also an INTEGER_CST tree node.  */

static int
size_htab_eq (const void *x, const void *y)
{
  tree xt = (tree) x;
  tree yt = (tree) y;

  return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
          && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
          && TREE_TYPE (xt) == TREE_TYPE (yt)
          && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
}
\f
/* Return an INTEGER_CST whose low-order HOST_BITS_PER_WIDE_INT bits are
   given by NUMBER, and whose type is the sizetype represented by KIND.  */

tree
size_int_wide (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return size_int_type_wide (number, sizetype_tab[(int) kind]);
}

/* Likewise, but the desired type is specified explicitly.  */

static GTY (()) tree new_const;
static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
     htab_t size_htab;

tree
size_int_type_wide (HOST_WIDE_INT number, tree type)
{
  void **slot;

  if (size_htab == 0)
    {
      size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
      new_const = make_node (INTEGER_CST);
    }

  /* Adjust NEW_CONST to be the constant we want.  If it's already in the
     hash table, we return the value from the hash table.  Otherwise, we
     place that in the hash table and make a new node for the next time.  */
  TREE_INT_CST_LOW (new_const) = number;
  TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
  TREE_TYPE (new_const) = type;
  TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
    = force_fit_type (new_const, 0);

  slot = htab_find_slot (size_htab, new_const, INSERT);
  if (*slot == 0)
    {
      tree t = new_const;

      *slot = new_const;
      new_const = make_node (INTEGER_CST);
      return t;
    }
  else
    return (tree) *slot;
}
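
/* The effect is hash-consing: repeated requests for the same constant
   of the same sizetype hand back one shared INTEGER_CST node, and the
   scratch node NEW_CONST is only consumed (and replaced) when a lookup
   misses.  */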

/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold (build (code, type, arg0, arg1));
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in a signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* If the type is already signed, just do the simple thing.  */
  if (! TREE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = (type == bitsizetype || type == ubitsizetype
           ? sbitsizetype : ssizetype);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, convert (ctype, arg0),
                       convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, convert (ctype, integer_zero_node),
                       convert (ctype, size_binop (MINUS_EXPR, arg1, arg0)));
}
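
/* E.g. for unsigned sizetype operands 3 and 5 this computes
   -(5 - 3) == -2 in ssizetype, rather than the huge wrapped-around
   value that the unsigned subtraction 3 - 5 would yield.  */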
\f

/* Given T, a tree representing type conversion of ARG1, a constant,
   return a constant tree representing the result of conversion.  */

static tree
fold_convert (tree t, tree arg1)
{
  tree type = TREE_TYPE (t);
  int overflow = 0;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        {
          /* If we would build a constant wider than GCC supports,
             leave the conversion unfolded.  */
          if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
            return t;

          /* If we are trying to make a sizetype for a small integer, use
             size_int to pick up cached types to reduce duplicate nodes.  */
          if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && !TREE_CONSTANT_OVERFLOW (arg1)
              && compare_tree_int (arg1, 10000) < 0)
            return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);

          /* Given an integer constant, make new constant with new type,
             appropriately sign-extended or truncated.  */
          t = build_int_2 (TREE_INT_CST_LOW (arg1),
                           TREE_INT_CST_HIGH (arg1));
          TREE_TYPE (t) = type;
          /* Indicate an overflow if (1) ARG1 already overflowed,
             or (2) force_fit_type indicates an overflow.
             Tell force_fit_type that an overflow has already occurred
             if ARG1 is a too-large unsigned value and T is signed.
             But don't indicate an overflow if converting a pointer.  */
          TREE_OVERFLOW (t)
            = ((force_fit_type (t,
                                (TREE_INT_CST_HIGH (arg1) < 0
                                 && (TREE_UNSIGNED (type)
                                     < TREE_UNSIGNED (TREE_TYPE (arg1)))))
                && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
               || TREE_OVERFLOW (arg1));
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
        }
      else if (TREE_CODE (arg1) == REAL_CST)
        {
          /* Don't initialize these, use assignments.
             Initialized local aggregates don't work on old compilers.  */
          REAL_VALUE_TYPE x;
          REAL_VALUE_TYPE l;
          REAL_VALUE_TYPE u;
          tree type1 = TREE_TYPE (arg1);
          int no_upper_bound;

          x = TREE_REAL_CST (arg1);
          l = real_value_from_int_cst (type1, TYPE_MIN_VALUE (type));

          no_upper_bound = (TYPE_MAX_VALUE (type) == NULL);
          if (!no_upper_bound)
            u = real_value_from_int_cst (type1, TYPE_MAX_VALUE (type));

          /* See if X will be in range after truncation towards 0.
             To compensate for truncation, move the bounds away from 0,
             but reject if X exactly equals the adjusted bounds.  */
          REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
          if (!no_upper_bound)
            REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
          /* If X is a NaN, use zero instead and show we have an overflow.
             Otherwise, range check.  */
          if (REAL_VALUE_ISNAN (x))
            overflow = 1, x = dconst0;
          else if (! (REAL_VALUES_LESS (l, x)
                      && !no_upper_bound
                      && REAL_VALUES_LESS (x, u)))
            overflow = 1;

          {
            HOST_WIDE_INT low, high;
            REAL_VALUE_TO_INT (&low, &high, x);
            t = build_int_2 (low, high);
          }
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t)
            = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
        }
      TREE_TYPE (t) = type;
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        {
          if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
            {
              /* We make a copy of ARG1 so that we don't modify an
                 existing constant tree.  */
              t = copy_node (arg1);
              TREE_TYPE (t) = type;
              return t;
            }

          t = build_real (type,
                          real_value_truncate (TYPE_MODE (type),
                                               TREE_REAL_CST (arg1)));

          TREE_OVERFLOW (t)
            = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
    }
  TREE_CONSTANT (t) = 1;
  return t;
}
\f
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue (tree x)
{
  tree result;

  /* These things are certainly not lvalues.  */
  if (TREE_CODE (x) == NON_LVALUE_EXPR
      || TREE_CODE (x) == INTEGER_CST
      || TREE_CODE (x) == REAL_CST
      || TREE_CODE (x) == STRING_CST
      || TREE_CODE (x) == ADDR_EXPR)
    return x;

  result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
  TREE_CONSTANT (result) = TREE_CONSTANT (x);
  return result;
}

/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

tree
pedantic_non_lvalue (tree x)
{
  if (pedantic_lvalues)
    return non_lvalue (x);
  else
    return x;
}
\f
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR.  */

static enum tree_code
invert_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return LE_EXPR;
    case GE_EXPR:
      return LT_EXPR;
    case LT_EXPR:
      return GE_EXPR;
    case LE_EXPR:
      return GT_EXPR;
    default:
      abort ();
    }
}

/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

static enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    default:
      abort ();
    }
}
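
/* E.g. a < b and b > a are the same test, which is why the swap is safe
   even for IEEE floating point: with unordered operands both forms are
   false together.  Inversion is different: !(a < b) holds for unordered
   operands while a >= b does not, hence the caveat above.  */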
1694
1695
1696 /* Convert a comparison tree code from an enum tree_code representation
1697 into a compcode bit-based encoding. This function is the inverse of
1698 compcode_to_comparison. */
1699
1700 static int
1701 comparison_to_compcode (enum tree_code code)
1702 {
1703 switch (code)
1704 {
1705 case LT_EXPR:
1706 return COMPCODE_LT;
1707 case EQ_EXPR:
1708 return COMPCODE_EQ;
1709 case LE_EXPR:
1710 return COMPCODE_LE;
1711 case GT_EXPR:
1712 return COMPCODE_GT;
1713 case NE_EXPR:
1714 return COMPCODE_NE;
1715 case GE_EXPR:
1716 return COMPCODE_GE;
1717 default:
1718 abort ();
1719 }
1720 }
1721
1722 /* Convert a compcode bit-based encoding of a comparison operator back
1723 to GCC's enum tree_code representation. This function is the
1724 inverse of comparison_to_compcode. */
1725
1726 static enum tree_code
1727 compcode_to_comparison (int code)
1728 {
1729 switch (code)
1730 {
1731 case COMPCODE_LT:
1732 return LT_EXPR;
1733 case COMPCODE_EQ:
1734 return EQ_EXPR;
1735 case COMPCODE_LE:
1736 return LE_EXPR;
1737 case COMPCODE_GT:
1738 return GT_EXPR;
1739 case COMPCODE_NE:
1740 return NE_EXPR;
1741 case COMPCODE_GE:
1742 return GE_EXPR;
1743 default:
1744 abort ();
1745 }
1746 }
1747
1748 /* Return nonzero if CODE is a tree code that represents a truth value. */
1749
1750 static int
1751 truth_value_p (enum tree_code code)
1752 {
1753 return (TREE_CODE_CLASS (code) == '<'
1754 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
1755 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
1756 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
1757 }
1758 \f
1759 /* Return nonzero if two operands are necessarily equal.
1760 If ONLY_CONST is nonzero, only return nonzero for constants.
1761 This function tests whether the operands are indistinguishable;
1762 it does not test whether they are equal using C's == operation.
1763 The distinction is important for IEEE floating point, because
1764 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
1765 (2) two NaNs may be indistinguishable, but NaN!=NaN. */
1766
1767 int
1768 operand_equal_p (tree arg0, tree arg1, int only_const)
1769 {
1770 /* If both types don't have the same signedness, then we can't consider
1771 them equal. We must check this before the STRIP_NOPS calls
1772 because they may change the signedness of the arguments. */
1773 if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
1774 return 0;
1775
1776 STRIP_NOPS (arg0);
1777 STRIP_NOPS (arg1);
1778
1779 if (TREE_CODE (arg0) != TREE_CODE (arg1)
1780 /* This is needed for conversions and for COMPONENT_REF.
1781 Might as well play it safe and always test this. */
1782 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
1783 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
1784 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
1785 return 0;
1786
1787 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
1788 We don't care about side effects in that case because the SAVE_EXPR
1789 takes care of that for us. In all other cases, two expressions are
1790 equal if they have no side effects. If we have two identical
1791 expressions with side effects that should be treated the same due
1792 to the only side effects being identical SAVE_EXPR's, that will
1793 be detected in the recursive calls below. */
1794 if (arg0 == arg1 && ! only_const
1795 && (TREE_CODE (arg0) == SAVE_EXPR
1796 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
1797 return 1;
1798
1799 /* Next handle constant cases, those for which we can return 1 even
1800 if ONLY_CONST is set. */
1801 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
1802 switch (TREE_CODE (arg0))
1803 {
1804 case INTEGER_CST:
1805 return (! TREE_CONSTANT_OVERFLOW (arg0)
1806 && ! TREE_CONSTANT_OVERFLOW (arg1)
1807 && tree_int_cst_equal (arg0, arg1));
1808
1809 case REAL_CST:
1810 return (! TREE_CONSTANT_OVERFLOW (arg0)
1811 && ! TREE_CONSTANT_OVERFLOW (arg1)
1812 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
1813 TREE_REAL_CST (arg1)));
1814
1815 case VECTOR_CST:
1816 {
1817 tree v1, v2;
1818
1819 if (TREE_CONSTANT_OVERFLOW (arg0)
1820 || TREE_CONSTANT_OVERFLOW (arg1))
1821 return 0;
1822
1823 v1 = TREE_VECTOR_CST_ELTS (arg0);
1824 v2 = TREE_VECTOR_CST_ELTS (arg1);
1825 while (v1 && v2)
1826 {
1827 if (!operand_equal_p (v1, v2, only_const))
1828 return 0;
1829 v1 = TREE_CHAIN (v1);
1830 v2 = TREE_CHAIN (v2);
1831 }
1832
1833 return 1;
1834 }
1835
1836 case COMPLEX_CST:
1837 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
1838 only_const)
1839 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
1840 only_const));
1841
1842 case STRING_CST:
1843 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
1844 && ! memcmp (TREE_STRING_POINTER (arg0),
1845 TREE_STRING_POINTER (arg1),
1846 TREE_STRING_LENGTH (arg0)));
1847
1848 case ADDR_EXPR:
1849 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
1850 0);
1851 default:
1852 break;
1853 }
1854
1855 if (only_const)
1856 return 0;
1857
1858 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
1859 {
1860 case '1':
1861 /* Two conversions are equal only if signedness and modes match. */
1862 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
1863 && (TREE_UNSIGNED (TREE_TYPE (arg0))
1864 != TREE_UNSIGNED (TREE_TYPE (arg1))))
1865 return 0;
1866
1867 return operand_equal_p (TREE_OPERAND (arg0, 0),
1868 TREE_OPERAND (arg1, 0), 0);
1869
1870 case '<':
1871 case '2':
1872 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
1873 && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
1874 0))
1875 return 1;
1876
1877 /* For commutative ops, allow the other order. */
1878 return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
1879 || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
1880 || TREE_CODE (arg0) == BIT_IOR_EXPR
1881 || TREE_CODE (arg0) == BIT_XOR_EXPR
1882 || TREE_CODE (arg0) == BIT_AND_EXPR
1883 || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
1884 && operand_equal_p (TREE_OPERAND (arg0, 0),
1885 TREE_OPERAND (arg1, 1), 0)
1886 && operand_equal_p (TREE_OPERAND (arg0, 1),
1887 TREE_OPERAND (arg1, 0), 0));
1888
1889 case 'r':
1890 /* If either of the pointer (or reference) expressions we are
1891 dereferencing contain a side effect, these cannot be equal. */
1892 if (TREE_SIDE_EFFECTS (arg0)
1893 || TREE_SIDE_EFFECTS (arg1))
1894 return 0;
1895
1896 switch (TREE_CODE (arg0))
1897 {
1898 case INDIRECT_REF:
1899 return operand_equal_p (TREE_OPERAND (arg0, 0),
1900 TREE_OPERAND (arg1, 0), 0);
1901
1902 case COMPONENT_REF:
1903 case ARRAY_REF:
1904 case ARRAY_RANGE_REF:
1905 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1906 TREE_OPERAND (arg1, 0), 0)
1907 && operand_equal_p (TREE_OPERAND (arg0, 1),
1908 TREE_OPERAND (arg1, 1), 0));
1909
1910 case BIT_FIELD_REF:
1911 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1912 TREE_OPERAND (arg1, 0), 0)
1913 && operand_equal_p (TREE_OPERAND (arg0, 1),
1914 TREE_OPERAND (arg1, 1), 0)
1915 && operand_equal_p (TREE_OPERAND (arg0, 2),
1916 TREE_OPERAND (arg1, 2), 0));
1917 default:
1918 return 0;
1919 }
1920
1921 case 'e':
1922 switch (TREE_CODE (arg0))
1923 {
1924 case ADDR_EXPR:
1925 case TRUTH_NOT_EXPR:
1926 return operand_equal_p (TREE_OPERAND (arg0, 0),
1927 TREE_OPERAND (arg1, 0), 0);
1928
1929 case RTL_EXPR:
1930 return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
1931
1932 case CALL_EXPR:
1933 /* If the CALL_EXPRs call different functions, then they
1934 	     clearly cannot be equal.  */
1935 if (! operand_equal_p (TREE_OPERAND (arg0, 0),
1936 TREE_OPERAND (arg1, 0), 0))
1937 return 0;
1938
1939 /* Only consider const functions equivalent. */
1940 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR)
1941 {
1942 tree fndecl = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
1943 if (! (flags_from_decl_or_type (fndecl) & ECF_CONST))
1944 return 0;
1945 }
1946 else
1947 return 0;
1948
1949 /* Now see if all the arguments are the same. operand_equal_p
1950 does not handle TREE_LIST, so we walk the operands here
1951 feeding them to operand_equal_p. */
1952 arg0 = TREE_OPERAND (arg0, 1);
1953 arg1 = TREE_OPERAND (arg1, 1);
1954 while (arg0 && arg1)
1955 {
1956 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1), 0))
1957 return 0;
1958
1959 arg0 = TREE_CHAIN (arg0);
1960 arg1 = TREE_CHAIN (arg1);
1961 }
1962
1963 /* If we get here and both argument lists are exhausted
1964 then the CALL_EXPRs are equal. */
1965 return ! (arg0 || arg1);
1966
1967 default:
1968 return 0;
1969 }
1970
1971 case 'd':
1972 /* Consider __builtin_sqrt equal to sqrt. */
1973 return TREE_CODE (arg0) == FUNCTION_DECL
1974 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
1975 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
1976 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1);
1977
1978 default:
1979 return 0;
1980 }
1981 }
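
/* Illustrative sketch, not part of the original sources: a hypothetical
   use of operand_equal_p exercising the commutative-operator case
   handled above.  TYPE, X and Y are assumed to be a shared integral
   type and two side-effect-free operands of that type.  */
#if 0
static int
example_commutative_equal_p (tree type, tree x, tree y)
{
  tree a = build (PLUS_EXPR, type, x, y);
  tree b = build (PLUS_EXPR, type, y, x);

  /* PLUS_EXPR is commutative, so the operands may match in either
     order and this returns 1.  */
  return operand_equal_p (a, b, 0);
}
#endif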
1982 \f
1983 /* Similar to operand_equal_p, but see if ARG0 might have been made by
1984 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
1985
1986 When in doubt, return 0. */
1987
1988 static int
1989 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
1990 {
1991 int unsignedp1, unsignedpo;
1992 tree primarg0, primarg1, primother;
1993 unsigned int correct_width;
1994
1995 if (operand_equal_p (arg0, arg1, 0))
1996 return 1;
1997
1998 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
1999 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2000 return 0;
2001
2002 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2003 and see if the inner values are the same. This removes any
2004 signedness comparison, which doesn't matter here. */
2005 primarg0 = arg0, primarg1 = arg1;
2006 STRIP_NOPS (primarg0);
2007 STRIP_NOPS (primarg1);
2008 if (operand_equal_p (primarg0, primarg1, 0))
2009 return 1;
2010
2011 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2012 actual comparison operand, ARG0.
2013
2014 First throw away any conversions to wider types
2015 already present in the operands. */
2016
2017 primarg1 = get_narrower (arg1, &unsignedp1);
2018 primother = get_narrower (other, &unsignedpo);
2019
2020 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2021 if (unsignedp1 == unsignedpo
2022 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2023 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2024 {
2025 tree type = TREE_TYPE (arg0);
2026
2027       /* Make sure the shorter operand is extended the right way
2028 to match the longer operand. */
2029 primarg1 = convert ((*lang_hooks.types.signed_or_unsigned_type)
2030 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2031
2032 if (operand_equal_p (arg0, convert (type, primarg1), 0))
2033 return 1;
2034 }
2035
2036 return 0;
2037 }
2038 \f
2039 /* See if ARG is an expression that is either a comparison or is performing
2040 arithmetic on comparisons. The comparisons must only be comparing
2041 two different values, which will be stored in *CVAL1 and *CVAL2; if
2042 they are nonzero it means that some operands have already been found.
2043 No variables may be used anywhere else in the expression except in the
2044 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2045 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2046
2047    If this is true, return 1.  Otherwise, return 0.  */
2048
2049 static int
2050 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2051 {
2052 enum tree_code code = TREE_CODE (arg);
2053 char class = TREE_CODE_CLASS (code);
2054
2055 /* We can handle some of the 'e' cases here. */
2056 if (class == 'e' && code == TRUTH_NOT_EXPR)
2057 class = '1';
2058 else if (class == 'e'
2059 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2060 || code == COMPOUND_EXPR))
2061 class = '2';
2062
2063 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
2064 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2065 {
2066 /* If we've already found a CVAL1 or CVAL2, this expression is
2067 	 too complex to handle.  */
2068 if (*cval1 || *cval2)
2069 return 0;
2070
2071 class = '1';
2072 *save_p = 1;
2073 }
2074
2075 switch (class)
2076 {
2077 case '1':
2078 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2079
2080 case '2':
2081 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2082 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2083 cval1, cval2, save_p));
2084
2085 case 'c':
2086 return 1;
2087
2088 case 'e':
2089 if (code == COND_EXPR)
2090 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2091 cval1, cval2, save_p)
2092 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2093 cval1, cval2, save_p)
2094 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2095 cval1, cval2, save_p));
2096 return 0;
2097
2098 case '<':
2099 /* First see if we can handle the first operand, then the second. For
2100 the second operand, we know *CVAL1 can't be zero. It must be that
2101 one side of the comparison is each of the values; test for the
2102 case where this isn't true by failing if the two operands
2103 are the same. */
2104
2105 if (operand_equal_p (TREE_OPERAND (arg, 0),
2106 TREE_OPERAND (arg, 1), 0))
2107 return 0;
2108
2109 if (*cval1 == 0)
2110 *cval1 = TREE_OPERAND (arg, 0);
2111 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2112 ;
2113 else if (*cval2 == 0)
2114 *cval2 = TREE_OPERAND (arg, 0);
2115 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2116 ;
2117 else
2118 return 0;
2119
2120 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2121 ;
2122 else if (*cval2 == 0)
2123 *cval2 = TREE_OPERAND (arg, 1);
2124 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2125 ;
2126 else
2127 return 0;
2128
2129 return 1;
2130
2131 default:
2132 return 0;
2133 }
2134 }
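
/* Illustrative sketch, not part of the original sources: for an ARG such
   as (x < y) && (x == y), built from two hypothetical operands, the walk
   above records x in *CVAL1 and y in *CVAL2 and returns 1, since every
   comparison in ARG mentions only those two values.  */
#if 0
static int
example_twoval (tree arg)
{
  tree cval1 = 0, cval2 = 0;
  int save_p = 0;

  return twoval_comparison_p (arg, &cval1, &cval2, &save_p);
}
#endif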
2135 \f
2136 /* ARG is a tree that is known to contain just arithmetic operations and
2137 comparisons. Evaluate the operations in the tree substituting NEW0 for
2138 any occurrence of OLD0 as an operand of a comparison and likewise for
2139 NEW1 and OLD1. */
2140
2141 static tree
2142 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2143 {
2144 tree type = TREE_TYPE (arg);
2145 enum tree_code code = TREE_CODE (arg);
2146 char class = TREE_CODE_CLASS (code);
2147
2148 /* We can handle some of the 'e' cases here. */
2149 if (class == 'e' && code == TRUTH_NOT_EXPR)
2150 class = '1';
2151 else if (class == 'e'
2152 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2153 class = '2';
2154
2155 switch (class)
2156 {
2157 case '1':
2158 return fold (build1 (code, type,
2159 eval_subst (TREE_OPERAND (arg, 0),
2160 old0, new0, old1, new1)));
2161
2162 case '2':
2163 return fold (build (code, type,
2164 eval_subst (TREE_OPERAND (arg, 0),
2165 old0, new0, old1, new1),
2166 eval_subst (TREE_OPERAND (arg, 1),
2167 old0, new0, old1, new1)));
2168
2169 case 'e':
2170 switch (code)
2171 {
2172 case SAVE_EXPR:
2173 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2174
2175 case COMPOUND_EXPR:
2176 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2177
2178 case COND_EXPR:
2179 return fold (build (code, type,
2180 eval_subst (TREE_OPERAND (arg, 0),
2181 old0, new0, old1, new1),
2182 eval_subst (TREE_OPERAND (arg, 1),
2183 old0, new0, old1, new1),
2184 eval_subst (TREE_OPERAND (arg, 2),
2185 old0, new0, old1, new1)));
2186 default:
2187 break;
2188 }
2189 /* fall through - ??? */
2190
2191 case '<':
2192 {
2193 tree arg0 = TREE_OPERAND (arg, 0);
2194 tree arg1 = TREE_OPERAND (arg, 1);
2195
2196 /* We need to check both for exact equality and tree equality. The
2197 former will be true if the operand has a side-effect. In that
2198 case, we know the operand occurred exactly once. */
2199
2200 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2201 arg0 = new0;
2202 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2203 arg0 = new1;
2204
2205 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2206 arg1 = new0;
2207 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2208 arg1 = new1;
2209
2210 return fold (build (code, type, arg0, arg1));
2211 }
2212
2213 default:
2214 return arg;
2215 }
2216 }
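
/* Illustrative sketch, not part of the original sources: substituting
   hypothetical operands NEW0 and NEW1 into a comparison of OLD0 and
   OLD1 via the '<' case above.  */
#if 0
static tree
example_eval_subst (tree type, tree old0, tree new0, tree old1, tree new1)
{
  tree cmp = build (LT_EXPR, type, old0, old1);

  /* Yields fold (build (LT_EXPR, type, new0, new1)).  */
  return eval_subst (cmp, old0, new0, old1, new1);
}
#endif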
2217 \f
2218 /* Return a tree for the case when the result of an expression is RESULT
2219 converted to TYPE and OMITTED was previously an operand of the expression
2220 but is now not needed (e.g., we folded OMITTED * 0).
2221
2222 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2223 the conversion of RESULT to TYPE. */
2224
2225 tree
2226 omit_one_operand (tree type, tree result, tree omitted)
2227 {
2228 tree t = convert (type, result);
2229
2230 if (TREE_SIDE_EFFECTS (omitted))
2231 return build (COMPOUND_EXPR, type, omitted, t);
2232
2233 return non_lvalue (t);
2234 }
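
/* Illustrative sketch, not part of the original sources: how a fold of
   "f () * 0" might drop the multiplication while keeping the call.
   CALL is a hypothetical CALL_EXPR with side effects, so the result is
   the COMPOUND_EXPR (f (), 0) rather than plain 0.  */
#if 0
static tree
example_omit (tree type, tree call)
{
  return omit_one_operand (type, convert (type, integer_zero_node), call);
}
#endif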
2235
2236 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2237
2238 static tree
2239 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2240 {
2241 tree t = convert (type, result);
2242
2243 if (TREE_SIDE_EFFECTS (omitted))
2244 return build (COMPOUND_EXPR, type, omitted, t);
2245
2246 return pedantic_non_lvalue (t);
2247 }
2248 \f
2249 /* Return a simplified tree node for the truth-negation of ARG. This
2250 never alters ARG itself. We assume that ARG is an operation that
2251 returns a truth value (0 or 1). */
2252
2253 tree
2254 invert_truthvalue (tree arg)
2255 {
2256 tree type = TREE_TYPE (arg);
2257 enum tree_code code = TREE_CODE (arg);
2258
2259 if (code == ERROR_MARK)
2260 return arg;
2261
2262 /* If this is a comparison, we can simply invert it, except for
2263 floating-point non-equality comparisons, in which case we just
2264 enclose a TRUTH_NOT_EXPR around what we have. */
2265
2266 if (TREE_CODE_CLASS (code) == '<')
2267 {
2268 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2269 && !flag_unsafe_math_optimizations
2270 && code != NE_EXPR
2271 && code != EQ_EXPR)
2272 return build1 (TRUTH_NOT_EXPR, type, arg);
2273 else
2274 return build (invert_tree_comparison (code), type,
2275 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2276 }
2277
2278 switch (code)
2279 {
2280 case INTEGER_CST:
2281 return convert (type, build_int_2 (integer_zerop (arg), 0));
2282
2283 case TRUTH_AND_EXPR:
2284 return build (TRUTH_OR_EXPR, type,
2285 invert_truthvalue (TREE_OPERAND (arg, 0)),
2286 invert_truthvalue (TREE_OPERAND (arg, 1)));
2287
2288 case TRUTH_OR_EXPR:
2289 return build (TRUTH_AND_EXPR, type,
2290 invert_truthvalue (TREE_OPERAND (arg, 0)),
2291 invert_truthvalue (TREE_OPERAND (arg, 1)));
2292
2293 case TRUTH_XOR_EXPR:
2294 /* Here we can invert either operand. We invert the first operand
2295 unless the second operand is a TRUTH_NOT_EXPR in which case our
2296 result is the XOR of the first operand with the inside of the
2297 negation of the second operand. */
2298
2299 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2300 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2301 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2302 else
2303 return build (TRUTH_XOR_EXPR, type,
2304 invert_truthvalue (TREE_OPERAND (arg, 0)),
2305 TREE_OPERAND (arg, 1));
2306
2307 case TRUTH_ANDIF_EXPR:
2308 return build (TRUTH_ORIF_EXPR, type,
2309 invert_truthvalue (TREE_OPERAND (arg, 0)),
2310 invert_truthvalue (TREE_OPERAND (arg, 1)));
2311
2312 case TRUTH_ORIF_EXPR:
2313 return build (TRUTH_ANDIF_EXPR, type,
2314 invert_truthvalue (TREE_OPERAND (arg, 0)),
2315 invert_truthvalue (TREE_OPERAND (arg, 1)));
2316
2317 case TRUTH_NOT_EXPR:
2318 return TREE_OPERAND (arg, 0);
2319
2320 case COND_EXPR:
2321 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2322 invert_truthvalue (TREE_OPERAND (arg, 1)),
2323 invert_truthvalue (TREE_OPERAND (arg, 2)));
2324
2325 case COMPOUND_EXPR:
2326 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2327 invert_truthvalue (TREE_OPERAND (arg, 1)));
2328
2329 case WITH_RECORD_EXPR:
2330 return build (WITH_RECORD_EXPR, type,
2331 invert_truthvalue (TREE_OPERAND (arg, 0)),
2332 TREE_OPERAND (arg, 1));
2333
2334 case NON_LVALUE_EXPR:
2335 return invert_truthvalue (TREE_OPERAND (arg, 0));
2336
2337 case NOP_EXPR:
2338 case CONVERT_EXPR:
2339 case FLOAT_EXPR:
2340 return build1 (TREE_CODE (arg), type,
2341 invert_truthvalue (TREE_OPERAND (arg, 0)));
2342
2343 case BIT_AND_EXPR:
2344 if (!integer_onep (TREE_OPERAND (arg, 1)))
2345 break;
2346 return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));
2347
2348 case SAVE_EXPR:
2349 return build1 (TRUTH_NOT_EXPR, type, arg);
2350
2351 case CLEANUP_POINT_EXPR:
2352 return build1 (CLEANUP_POINT_EXPR, type,
2353 invert_truthvalue (TREE_OPERAND (arg, 0)));
2354
2355 default:
2356 break;
2357 }
2358 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2359 abort ();
2360 return build1 (TRUTH_NOT_EXPR, type, arg);
2361 }
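
/* Illustrative sketch, not part of the original sources: inverting an
   integral comparison of two hypothetical operands simply flips the
   comparison code, so the result below is (x >= y); only IEEE
   floating-point non-equality compares fall back to wrapping a
   TRUTH_NOT_EXPR, as described above.  */
#if 0
static tree
example_invert (tree x, tree y)
{
  tree cmp = build (LT_EXPR, integer_type_node, x, y);

  return invert_truthvalue (cmp);
}
#endif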
2362
2363 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2364 operands are another bit-wise operation with a common input. If so,
2365 distribute the bit operations to save an operation and possibly two if
2366 constants are involved. For example, convert
2367 (A | B) & (A | C) into A | (B & C)
2368 Further simplification will occur if B and C are constants.
2369
2370 If this optimization cannot be done, 0 will be returned. */
2371
2372 static tree
2373 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
2374 {
2375 tree common;
2376 tree left, right;
2377
2378 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2379 || TREE_CODE (arg0) == code
2380 || (TREE_CODE (arg0) != BIT_AND_EXPR
2381 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2382 return 0;
2383
2384 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2385 {
2386 common = TREE_OPERAND (arg0, 0);
2387 left = TREE_OPERAND (arg0, 1);
2388 right = TREE_OPERAND (arg1, 1);
2389 }
2390 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2391 {
2392 common = TREE_OPERAND (arg0, 0);
2393 left = TREE_OPERAND (arg0, 1);
2394 right = TREE_OPERAND (arg1, 0);
2395 }
2396 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2397 {
2398 common = TREE_OPERAND (arg0, 1);
2399 left = TREE_OPERAND (arg0, 0);
2400 right = TREE_OPERAND (arg1, 1);
2401 }
2402 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2403 {
2404 common = TREE_OPERAND (arg0, 1);
2405 left = TREE_OPERAND (arg0, 0);
2406 right = TREE_OPERAND (arg1, 0);
2407 }
2408 else
2409 return 0;
2410
2411 return fold (build (TREE_CODE (arg0), type, common,
2412 fold (build (code, type, left, right))));
2413 }
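
/* Illustrative sketch, not part of the original sources: the
   transformation documented above, applied to hypothetical operands A,
   B and C of integral TYPE.  */
#if 0
static tree
example_distribute (tree type, tree a, tree b, tree c)
{
  tree lhs = build (BIT_IOR_EXPR, type, a, b);
  tree rhs = build (BIT_IOR_EXPR, type, a, c);

  /* (A | B) & (A | C) becomes A | (B & C).  */
  return distribute_bit_expr (BIT_AND_EXPR, type, lhs, rhs);
}
#endif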
2414 \f
2415 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2416 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
2417
2418 static tree
2419 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos, int unsignedp)
2420 {
2421 tree result = build (BIT_FIELD_REF, type, inner,
2422 size_int (bitsize), bitsize_int (bitpos));
2423
2424 TREE_UNSIGNED (result) = unsignedp;
2425
2426 return result;
2427 }
2428
2429 /* Optimize a bit-field compare.
2430
2431 There are two cases: First is a compare against a constant and the
2432 second is a comparison of two items where the fields are at the same
2433 bit position relative to the start of a chunk (byte, halfword, word)
2434 large enough to contain it. In these cases we can avoid the shift
2435 implicit in bitfield extractions.
2436
2437 For constants, we emit a compare of the shifted constant with the
2438 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2439 compared. For two fields at the same position, we do the ANDs with the
2440 similar mask and compare the result of the ANDs.
2441
2442 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2443 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2444 are the left and right operands of the comparison, respectively.
2445
2446 If the optimization described above can be done, we return the resulting
2447 tree. Otherwise we return zero. */
2448
2449 static tree
2450 optimize_bit_field_compare (enum tree_code code, tree compare_type, tree lhs, tree rhs)
2451 {
2452 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
2453 tree type = TREE_TYPE (lhs);
2454 tree signed_type, unsigned_type;
2455 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2456 enum machine_mode lmode, rmode, nmode;
2457 int lunsignedp, runsignedp;
2458 int lvolatilep = 0, rvolatilep = 0;
2459 tree linner, rinner = NULL_TREE;
2460 tree mask;
2461 tree offset;
2462
2463 /* Get all the information about the extractions being done. If the bit size
2464      is the same as the size of the underlying object, we aren't doing an
2465 extraction at all and so can do nothing. We also don't want to
2466 do anything if the inner expression is a PLACEHOLDER_EXPR since we
2467 then will no longer be able to replace it. */
2468 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2469 &lunsignedp, &lvolatilep);
2470 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2471 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
2472 return 0;
2473
2474 if (!const_p)
2475 {
2476 /* If this is not a constant, we can only do something if bit positions,
2477 sizes, and signedness are the same. */
2478 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
2479 &runsignedp, &rvolatilep);
2480
2481 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2482 || lunsignedp != runsignedp || offset != 0
2483 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
2484 return 0;
2485 }
2486
2487 /* See if we can find a mode to refer to this field. We should be able to,
2488 but fail if we can't. */
2489 nmode = get_best_mode (lbitsize, lbitpos,
2490 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
2491 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
2492 TYPE_ALIGN (TREE_TYPE (rinner))),
2493 word_mode, lvolatilep || rvolatilep);
2494 if (nmode == VOIDmode)
2495 return 0;
2496
2497 /* Set signed and unsigned types of the precision of this mode for the
2498 shifts below. */
2499 signed_type = (*lang_hooks.types.type_for_mode) (nmode, 0);
2500 unsigned_type = (*lang_hooks.types.type_for_mode) (nmode, 1);
2501
2502 /* Compute the bit position and size for the new reference and our offset
2503 within it. If the new reference is the same size as the original, we
2504 won't optimize anything, so return zero. */
2505 nbitsize = GET_MODE_BITSIZE (nmode);
2506 nbitpos = lbitpos & ~ (nbitsize - 1);
2507 lbitpos -= nbitpos;
2508 if (nbitsize == lbitsize)
2509 return 0;
2510
2511 if (BYTES_BIG_ENDIAN)
2512 lbitpos = nbitsize - lbitsize - lbitpos;
2513
2514 /* Make the mask to be used against the extracted field. */
2515 mask = build_int_2 (~0, ~0);
2516 TREE_TYPE (mask) = unsigned_type;
2517 force_fit_type (mask, 0);
2518 mask = convert (unsigned_type, mask);
2519 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
2520 mask = const_binop (RSHIFT_EXPR, mask,
2521 size_int (nbitsize - lbitsize - lbitpos), 0);
2522
2523 if (! const_p)
2524 /* If not comparing with constant, just rework the comparison
2525 and return. */
2526 return build (code, compare_type,
2527 build (BIT_AND_EXPR, unsigned_type,
2528 make_bit_field_ref (linner, unsigned_type,
2529 nbitsize, nbitpos, 1),
2530 mask),
2531 build (BIT_AND_EXPR, unsigned_type,
2532 make_bit_field_ref (rinner, unsigned_type,
2533 nbitsize, nbitpos, 1),
2534 mask));
2535
2536 /* Otherwise, we are handling the constant case. See if the constant is too
2537      big for the field.  Warn and return a tree for 0 (false) if so.  We do
2538 this not only for its own sake, but to avoid having to test for this
2539 error case below. If we didn't, we might generate wrong code.
2540
2541 For unsigned fields, the constant shifted right by the field length should
2542 be all zero. For signed fields, the high-order bits should agree with
2543 the sign bit. */
2544
2545 if (lunsignedp)
2546 {
2547 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2548 convert (unsigned_type, rhs),
2549 size_int (lbitsize), 0)))
2550 {
2551 warning ("comparison is always %d due to width of bit-field",
2552 code == NE_EXPR);
2553 return convert (compare_type,
2554 (code == NE_EXPR
2555 ? integer_one_node : integer_zero_node));
2556 }
2557 }
2558 else
2559 {
2560 tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
2561 size_int (lbitsize - 1), 0);
2562 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2563 {
2564 warning ("comparison is always %d due to width of bit-field",
2565 code == NE_EXPR);
2566 return convert (compare_type,
2567 (code == NE_EXPR
2568 ? integer_one_node : integer_zero_node));
2569 }
2570 }
2571
2572 /* Single-bit compares should always be against zero. */
2573 if (lbitsize == 1 && ! integer_zerop (rhs))
2574 {
2575 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2576 rhs = convert (type, integer_zero_node);
2577 }
2578
2579 /* Make a new bitfield reference, shift the constant over the
2580 appropriate number of bits and mask it with the computed mask
2581 (in case this was a signed field). If we changed it, make a new one. */
2582 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
2583 if (lvolatilep)
2584 {
2585 TREE_SIDE_EFFECTS (lhs) = 1;
2586 TREE_THIS_VOLATILE (lhs) = 1;
2587 }
2588
2589 rhs = fold (const_binop (BIT_AND_EXPR,
2590 const_binop (LSHIFT_EXPR,
2591 convert (unsigned_type, rhs),
2592 size_int (lbitpos), 0),
2593 mask, 0));
2594
2595 return build (code, compare_type,
2596 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
2597 rhs);
2598 }
2599 \f
2600 /* Subroutine for fold_truthop: decode a field reference.
2601
2602 If EXP is a comparison reference, we return the innermost reference.
2603
2604 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2605 set to the starting bit number.
2606
2607 If the innermost field can be completely contained in a mode-sized
2608 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
2609
2610    *PVOLATILEP is set to 1 if any expression encountered is volatile;
2611 otherwise it is not changed.
2612
2613 *PUNSIGNEDP is set to the signedness of the field.
2614
2615 *PMASK is set to the mask used. This is either contained in a
2616 BIT_AND_EXPR or derived from the width of the field.
2617
2618 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
2619
2620 Return 0 if this is not a component reference or is one that we can't
2621 do anything with. */
2622
2623 static tree
2624 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize, HOST_WIDE_INT *pbitpos,
2625 enum machine_mode *pmode, int *punsignedp, int *pvolatilep,
2626 tree *pmask, tree *pand_mask)
2627 {
2628 tree outer_type = 0;
2629 tree and_mask = 0;
2630 tree mask, inner, offset;
2631 tree unsigned_type;
2632 unsigned int precision;
2633
2634 /* All the optimizations using this function assume integer fields.
2635 There are problems with FP fields since the type_for_size call
2636 below can fail for, e.g., XFmode. */
2637 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
2638 return 0;
2639
2640 /* We are interested in the bare arrangement of bits, so strip everything
2641 that doesn't affect the machine mode. However, record the type of the
2642 outermost expression if it may matter below. */
2643 if (TREE_CODE (exp) == NOP_EXPR
2644 || TREE_CODE (exp) == CONVERT_EXPR
2645 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2646 outer_type = TREE_TYPE (exp);
2647 STRIP_NOPS (exp);
2648
2649 if (TREE_CODE (exp) == BIT_AND_EXPR)
2650 {
2651 and_mask = TREE_OPERAND (exp, 1);
2652 exp = TREE_OPERAND (exp, 0);
2653 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
2654 if (TREE_CODE (and_mask) != INTEGER_CST)
2655 return 0;
2656 }
2657
2658 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
2659 punsignedp, pvolatilep);
2660 if ((inner == exp && and_mask == 0)
2661 || *pbitsize < 0 || offset != 0
2662 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
2663 return 0;
2664
2665 /* If the number of bits in the reference is the same as the bitsize of
2666 the outer type, then the outer type gives the signedness. Otherwise
2667 (in case of a small bitfield) the signedness is unchanged. */
2668 if (outer_type && *pbitsize == tree_low_cst (TYPE_SIZE (outer_type), 1))
2669 *punsignedp = TREE_UNSIGNED (outer_type);
2670
2671 /* Compute the mask to access the bitfield. */
2672 unsigned_type = (*lang_hooks.types.type_for_size) (*pbitsize, 1);
2673 precision = TYPE_PRECISION (unsigned_type);
2674
2675 mask = build_int_2 (~0, ~0);
2676 TREE_TYPE (mask) = unsigned_type;
2677 force_fit_type (mask, 0);
2678 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2679 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2680
2681 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
2682 if (and_mask != 0)
2683 mask = fold (build (BIT_AND_EXPR, unsigned_type,
2684 convert (unsigned_type, and_mask), mask));
2685
2686 *pmask = mask;
2687 *pand_mask = and_mask;
2688 return inner;
2689 }
2690
2691 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
2692 bit positions. */
2693
2694 static int
2695 all_ones_mask_p (tree mask, int size)
2696 {
2697 tree type = TREE_TYPE (mask);
2698 unsigned int precision = TYPE_PRECISION (type);
2699 tree tmask;
2700
2701 tmask = build_int_2 (~0, ~0);
2702 TREE_TYPE (tmask) = (*lang_hooks.types.signed_type) (type);
2703 force_fit_type (tmask, 0);
2704 return
2705 tree_int_cst_equal (mask,
2706 const_binop (RSHIFT_EXPR,
2707 const_binop (LSHIFT_EXPR, tmask,
2708 size_int (precision - size),
2709 0),
2710 size_int (precision - size), 0));
2711 }
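
/* Illustrative sketch, not part of the original sources: 0x3f is six
   low-order one bits, so the call below returns nonzero, while
   all_ones_mask_p (mask, 5) would return zero because bit 5 lies
   outside a 5-bit mask.  */
#if 0
static int
example_all_ones (void)
{
  tree mask = build_int_2 (0x3f, 0);

  TREE_TYPE (mask) = (*lang_hooks.types.type_for_size) (32, 1);
  return all_ones_mask_p (mask, 6);
}
#endif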
2712
2713 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
2714 represents the sign bit of EXP's type. If EXP represents a sign
2715 or zero extension, also test VAL against the unextended type.
2716 The return value is the (sub)expression whose sign bit is VAL,
2717 or NULL_TREE otherwise. */
2718
2719 static tree
2720 sign_bit_p (tree exp, tree val)
2721 {
2722 unsigned HOST_WIDE_INT lo;
2723 HOST_WIDE_INT hi;
2724 int width;
2725 tree t;
2726
2727 /* Tree EXP must have an integral type. */
2728 t = TREE_TYPE (exp);
2729 if (! INTEGRAL_TYPE_P (t))
2730 return NULL_TREE;
2731
2732 /* Tree VAL must be an integer constant. */
2733 if (TREE_CODE (val) != INTEGER_CST
2734 || TREE_CONSTANT_OVERFLOW (val))
2735 return NULL_TREE;
2736
2737 width = TYPE_PRECISION (t);
2738 if (width > HOST_BITS_PER_WIDE_INT)
2739 {
2740 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
2741 lo = 0;
2742 }
2743 else
2744 {
2745 hi = 0;
2746 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
2747 }
2748
2749 if (TREE_INT_CST_HIGH (val) == hi && TREE_INT_CST_LOW (val) == lo)
2750 return exp;
2751
2752 /* Handle extension from a narrower type. */
2753 if (TREE_CODE (exp) == NOP_EXPR
2754 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
2755 return sign_bit_p (TREE_OPERAND (exp, 0), val);
2756
2757 return NULL_TREE;
2758 }
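
/* Illustrative sketch, not part of the original sources: for a 32-bit
   integral EXP the sign bit is bit 31, so a VAL of 0x80000000 makes
   sign_bit_p return EXP itself; any other constant yields NULL_TREE.  */
#if 0
static tree
example_sign_bit (tree exp)
{
  tree val = build_int_2 ((unsigned HOST_WIDE_INT) 1 << 31, 0);

  TREE_TYPE (val) = TREE_TYPE (exp);
  return sign_bit_p (exp, val);
}
#endif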
2759
2760 /* Subroutine for fold_truthop: determine if an operand is simple enough
2761 to be evaluated unconditionally. */
2762
2763 static int
2764 simple_operand_p (tree exp)
2765 {
2766 /* Strip any conversions that don't change the machine mode. */
2767 while ((TREE_CODE (exp) == NOP_EXPR
2768 || TREE_CODE (exp) == CONVERT_EXPR)
2769 && (TYPE_MODE (TREE_TYPE (exp))
2770 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
2771 exp = TREE_OPERAND (exp, 0);
2772
2773 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
2774 || (DECL_P (exp)
2775 && ! TREE_ADDRESSABLE (exp)
2776 && ! TREE_THIS_VOLATILE (exp)
2777 && ! DECL_NONLOCAL (exp)
2778 /* Don't regard global variables as simple. They may be
2779 allocated in ways unknown to the compiler (shared memory,
2780 #pragma weak, etc). */
2781 && ! TREE_PUBLIC (exp)
2782 && ! DECL_EXTERNAL (exp)
2783 /* Loading a static variable is unduly expensive, but global
2784 registers aren't expensive. */
2785 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
2786 }
2787 \f
2788 /* The following functions are subroutines to fold_range_test and allow it to
2789 try to change a logical combination of comparisons into a range test.
2790
2791 For example, both
2792 X == 2 || X == 3 || X == 4 || X == 5
2793 and
2794 X >= 2 && X <= 5
2795 are converted to
2796 (unsigned) (X - 2) <= 3
2797
2798 We describe each set of comparisons as being either inside or outside
2799 a range, using a variable named like IN_P, and then describe the
2800 range with a lower and upper bound. If one of the bounds is omitted,
2801 it represents either the highest or lowest value of the type.
2802
2803 In the comments below, we represent a range by two numbers in brackets
2804 preceded by a "+" to designate being inside that range, or a "-" to
2805 designate being outside that range, so the condition can be inverted by
2806 flipping the prefix. An omitted bound is represented by a "-". For
2807 example, "- [-, 10]" means being outside the range starting at the lowest
2808 possible value and ending at 10, in other words, being greater than 10.
2809 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
2810 always false.
2811
2812 We set up things so that the missing bounds are handled in a consistent
2813 manner so neither a missing bound nor "true" and "false" need to be
2814 handled using a special case. */
2815
2816 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
2817 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
2818 and UPPER1_P are nonzero if the respective argument is an upper bound
2819 and zero for a lower. TYPE, if nonzero, is the type of the result; it
2820 must be specified for a comparison. ARG1 will be converted to ARG0's
2821 type if both are specified. */
2822
2823 static tree
2824 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p, tree arg1,
2825 int upper1_p)
2826 {
2827 tree tem;
2828 int result;
2829 int sgn0, sgn1;
2830
2831 /* If neither arg represents infinity, do the normal operation.
2832 Else, if not a comparison, return infinity. Else handle the special
2833 comparison rules. Note that most of the cases below won't occur, but
2834 are handled for consistency. */
2835
2836 if (arg0 != 0 && arg1 != 0)
2837 {
2838 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
2839 arg0, convert (TREE_TYPE (arg0), arg1)));
2840 STRIP_NOPS (tem);
2841 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
2842 }
2843
2844 if (TREE_CODE_CLASS (code) != '<')
2845 return 0;
2846
2847 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
2848 for neither. In real maths, we cannot assume open ended ranges are
2849 the same. But, this is computer arithmetic, where numbers are finite.
2850      We can therefore represent any omitted bound by a value Z, Z being
2851      greater than any representable number.  This permits
2852 us to treat unbounded ranges as equal. */
2853 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
2854 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
2855 switch (code)
2856 {
2857 case EQ_EXPR:
2858 result = sgn0 == sgn1;
2859 break;
2860 case NE_EXPR:
2861 result = sgn0 != sgn1;
2862 break;
2863 case LT_EXPR:
2864 result = sgn0 < sgn1;
2865 break;
2866 case LE_EXPR:
2867 result = sgn0 <= sgn1;
2868 break;
2869 case GT_EXPR:
2870 result = sgn0 > sgn1;
2871 break;
2872 case GE_EXPR:
2873 result = sgn0 >= sgn1;
2874 break;
2875 default:
2876 abort ();
2877 }
2878
2879 return convert (type, result ? integer_one_node : integer_zero_node);
2880 }
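
/* Illustrative worked example, not from the original sources: comparing
   an omitted lower bound against the constant 5 with LT_EXPR.  ARG0 is
   0 with UPPER0_P clear, so sgn0 == -1, while sgn1 == 0 for the
   constant; -1 < 0, so the result is integer_one_node: a missing lower
   bound sorts below every representable value.  */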
2881 \f
2882 /* Given EXP, a logical expression, set the range it is testing into
2883 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
2884 actually being tested. *PLOW and *PHIGH will be made of the same type
2885 as the returned expression. If EXP is not a comparison, we will most
2886 likely not be returning a useful value and range. */
2887
2888 static tree
2889 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
2890 {
2891 enum tree_code code;
2892 tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
2893 tree orig_type = NULL_TREE;
2894 int in_p, n_in_p;
2895 tree low, high, n_low, n_high;
2896
2897 /* Start with simply saying "EXP != 0" and then look at the code of EXP
2898 and see if we can refine the range. Some of the cases below may not
2899 happen, but it doesn't seem worth worrying about this. We "continue"
2900 the outer loop when we've changed something; otherwise we "break"
2901 the switch, which will "break" the while. */
2902
2903 in_p = 0, low = high = convert (TREE_TYPE (exp), integer_zero_node);
2904
2905 while (1)
2906 {
2907 code = TREE_CODE (exp);
2908
2909 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
2910 {
2911 arg0 = TREE_OPERAND (exp, 0);
2912 if (TREE_CODE_CLASS (code) == '<'
2913 || TREE_CODE_CLASS (code) == '1'
2914 || TREE_CODE_CLASS (code) == '2')
2915 type = TREE_TYPE (arg0);
2916 if (TREE_CODE_CLASS (code) == '2'
2917 || TREE_CODE_CLASS (code) == '<'
2918 || (TREE_CODE_CLASS (code) == 'e'
2919 && TREE_CODE_LENGTH (code) > 1))
2920 arg1 = TREE_OPERAND (exp, 1);
2921 }
2922
2923 /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
2924 lose a cast by accident. */
2925 if (type != NULL_TREE && orig_type == NULL_TREE)
2926 orig_type = type;
2927
2928 switch (code)
2929 {
2930 case TRUTH_NOT_EXPR:
2931 in_p = ! in_p, exp = arg0;
2932 continue;
2933
2934 case EQ_EXPR: case NE_EXPR:
2935 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
2936 /* We can only do something if the range is testing for zero
2937 and if the second operand is an integer constant. Note that
2938 saying something is "in" the range we make is done by
2939 	     complementing IN_P, since IN_P gets set in the initial case of
2940 being not equal to zero; "out" is leaving it alone. */
2941 if (low == 0 || high == 0
2942 || ! integer_zerop (low) || ! integer_zerop (high)
2943 || TREE_CODE (arg1) != INTEGER_CST)
2944 break;
2945
2946 switch (code)
2947 {
2948 case NE_EXPR: /* - [c, c] */
2949 low = high = arg1;
2950 break;
2951 case EQ_EXPR: /* + [c, c] */
2952 in_p = ! in_p, low = high = arg1;
2953 break;
2954 case GT_EXPR: /* - [-, c] */
2955 low = 0, high = arg1;
2956 break;
2957 case GE_EXPR: /* + [c, -] */
2958 in_p = ! in_p, low = arg1, high = 0;
2959 break;
2960 case LT_EXPR: /* - [c, -] */
2961 low = arg1, high = 0;
2962 break;
2963 case LE_EXPR: /* + [-, c] */
2964 in_p = ! in_p, low = 0, high = arg1;
2965 break;
2966 default:
2967 abort ();
2968 }
2969
2970 exp = arg0;
2971
2972 /* If this is an unsigned comparison, we also know that EXP is
2973 greater than or equal to zero. We base the range tests we make
2974 on that fact, so we record it here so we can parse existing
2975 range tests. */
2976 if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
2977 {
2978 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
2979 1, convert (type, integer_zero_node),
2980 NULL_TREE))
2981 break;
2982
2983 in_p = n_in_p, low = n_low, high = n_high;
2984
2985 /* If the high bound is missing, but we
2986 have a low bound, reverse the range so
2987 it goes from zero to the low bound minus 1. */
2988 if (high == 0 && low)
2989 {
2990 in_p = ! in_p;
2991 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
2992 integer_one_node, 0);
2993 low = convert (type, integer_zero_node);
2994 }
2995 }
2996 continue;
2997
2998 case NEGATE_EXPR:
2999 /* (-x) IN [a,b] -> x in [-b, -a] */
3000 n_low = range_binop (MINUS_EXPR, type,
3001 convert (type, integer_zero_node), 0, high, 1);
3002 n_high = range_binop (MINUS_EXPR, type,
3003 convert (type, integer_zero_node), 0, low, 0);
3004 low = n_low, high = n_high;
3005 exp = arg0;
3006 continue;
3007
3008 case BIT_NOT_EXPR:
3009 /* ~ X -> -X - 1 */
3010 exp = build (MINUS_EXPR, type, negate_expr (arg0),
3011 convert (type, integer_one_node));
3012 continue;
3013
3014 case PLUS_EXPR: case MINUS_EXPR:
3015 if (TREE_CODE (arg1) != INTEGER_CST)
3016 break;
3017
3018 /* If EXP is signed, any overflow in the computation is undefined,
3019 so we don't worry about it so long as our computations on
3020 the bounds don't overflow. For unsigned, overflow is defined
3021 and this is exactly the right thing. */
3022 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3023 type, low, 0, arg1, 0);
3024 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3025 type, high, 1, arg1, 0);
3026 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3027 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3028 break;
3029
3030 /* Check for an unsigned range which has wrapped around the maximum
3031 value thus making n_high < n_low, and normalize it. */
3032 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3033 {
3034 low = range_binop (PLUS_EXPR, type, n_high, 0,
3035 integer_one_node, 0);
3036 high = range_binop (MINUS_EXPR, type, n_low, 0,
3037 integer_one_node, 0);
3038
3039 /* If the range is of the form +/- [ x+1, x ], we won't
3040 be able to normalize it. But then, it represents the
3041 whole range or the empty set, so make it
3042 +/- [ -, - ]. */
3043 if (tree_int_cst_equal (n_low, low)
3044 && tree_int_cst_equal (n_high, high))
3045 low = high = 0;
3046 else
3047 in_p = ! in_p;
3048 }
3049 else
3050 low = n_low, high = n_high;
3051
3052 exp = arg0;
3053 continue;
3054
3055 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3056 if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
3057 break;
3058
3059 if (! INTEGRAL_TYPE_P (type)
3060 || (low != 0 && ! int_fits_type_p (low, type))
3061 || (high != 0 && ! int_fits_type_p (high, type)))
3062 break;
3063
3064 n_low = low, n_high = high;
3065
3066 if (n_low != 0)
3067 n_low = convert (type, n_low);
3068
3069 if (n_high != 0)
3070 n_high = convert (type, n_high);
3071
3072 /* If we're converting from an unsigned to a signed type,
3073 we will be doing the comparison as unsigned. The tests above
3074 have already verified that LOW and HIGH are both positive.
3075
3076 So we have to make sure that the original unsigned value will
3077 be interpreted as positive. */
3078 if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
3079 {
3080 tree equiv_type = (*lang_hooks.types.type_for_mode)
3081 (TYPE_MODE (type), 1);
3082 tree high_positive;
3083
3084 /* A range without an upper bound is, naturally, unbounded.
3085 Since convert would have cropped a very large value, use
3086 the max value for the destination type. */
3087 high_positive
3088 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3089 : TYPE_MAX_VALUE (type);
3090
3091 if (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (exp)))
3092 high_positive = fold (build (RSHIFT_EXPR, type,
3093 convert (type, high_positive),
3094 convert (type, integer_one_node)));
3095
3096 /* If the low bound is specified, "and" the range with the
3097 range for which the original unsigned value will be
3098 positive. */
3099 if (low != 0)
3100 {
3101 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3102 1, n_low, n_high,
3103 1, convert (type, integer_zero_node),
3104 high_positive))
3105 break;
3106
3107 in_p = (n_in_p == in_p);
3108 }
3109 else
3110 {
3111 /* Otherwise, "or" the range with the range of the input
3112 that will be interpreted as negative. */
3113 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3114 0, n_low, n_high,
3115 1, convert (type, integer_zero_node),
3116 high_positive))
3117 break;
3118
3119 in_p = (in_p != n_in_p);
3120 }
3121 }
3122
3123 exp = arg0;
3124 low = n_low, high = n_high;
3125 continue;
3126
3127 default:
3128 break;
3129 }
3130
3131 break;
3132 }
3133
3134 /* If EXP is a constant, we can evaluate whether this is true or false. */
3135 if (TREE_CODE (exp) == INTEGER_CST)
3136 {
3137 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3138 exp, 0, low, 0))
3139 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3140 exp, 1, high, 1)));
3141 low = high = 0;
3142 exp = 0;
3143 }
3144
3145 *pin_p = in_p, *plow = low, *phigh = high;
3146 return exp;
3147 }
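
/* Illustrative worked example, not from the original sources: for
   EXP == (x > 10), the GT_EXPR case above leaves IN_P at 0 and sets
   low = 0 (unbounded) and high = 10, i.e. the range "- [-, 10]":
   x lies outside the range ending at 10, in other words x > 10.
   The returned expression is x itself.  */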
3148 \f
3149 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3150 type, TYPE, return an expression to test if EXP is in (or out of, depending
3151 on IN_P) the range. */
3152
3153 static tree
3154 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3155 {
3156 tree etype = TREE_TYPE (exp);
3157 tree value;
3158
3159 if (! in_p
3160 && (0 != (value = build_range_check (type, exp, 1, low, high))))
3161 return invert_truthvalue (value);
3162
3163 if (low == 0 && high == 0)
3164 return convert (type, integer_one_node);
3165
3166 if (low == 0)
3167 return fold (build (LE_EXPR, type, exp, high));
3168
3169 if (high == 0)
3170 return fold (build (GE_EXPR, type, exp, low));
3171
3172 if (operand_equal_p (low, high, 0))
3173 return fold (build (EQ_EXPR, type, exp, low));
3174
3175 if (integer_zerop (low))
3176 {
3177 if (! TREE_UNSIGNED (etype))
3178 {
3179 etype = (*lang_hooks.types.unsigned_type) (etype);
3180 high = convert (etype, high);
3181 exp = convert (etype, exp);
3182 }
3183 return build_range_check (type, exp, 1, 0, high);
3184 }
3185
3186 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3187 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3188 {
3189 unsigned HOST_WIDE_INT lo;
3190 HOST_WIDE_INT hi;
3191 int prec;
3192
3193 prec = TYPE_PRECISION (etype);
3194 if (prec <= HOST_BITS_PER_WIDE_INT)
3195 {
3196 hi = 0;
3197 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3198 }
3199 else
3200 {
3201 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3202 lo = (unsigned HOST_WIDE_INT) -1;
3203 }
3204
3205 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3206 {
3207 if (TREE_UNSIGNED (etype))
3208 {
3209 etype = (*lang_hooks.types.signed_type) (etype);
3210 exp = convert (etype, exp);
3211 }
3212 return fold (build (GT_EXPR, type, exp,
3213 convert (etype, integer_zero_node)));
3214 }
3215 }
3216
3217 if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3218 && ! TREE_OVERFLOW (value))
3219 return build_range_check (type,
3220 fold (build (MINUS_EXPR, etype, exp, low)),
3221 1, convert (etype, integer_zero_node), value);
3222
3223 return 0;
3224 }
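
/* Illustrative sketch, not part of the original sources: a range check
   for a hypothetical integral EXP in [2, 5].  Neither bound is zero
   and they differ, so the final case above subtracts the low bound and
   recurses, producing (unsigned) (exp - 2) <= 3.  */
#if 0
static tree
example_range_check (tree type, tree exp)
{
  tree low = build_int_2 (2, 0);
  tree high = build_int_2 (5, 0);

  TREE_TYPE (low) = TREE_TYPE (high) = TREE_TYPE (exp);
  return build_range_check (type, exp, 1, low, high);
}
#endif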
3225 \f
3226 /* Given two ranges, see if we can merge them into one. Return 1 if we
3227 can, 0 if we can't. Set the output range into the specified parameters. */
3228
3229 static int
3230 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0, tree high0,
3231 int in1_p, tree low1, tree high1)
3232 {
3233 int no_overlap;
3234 int subset;
3235 int temp;
3236 tree tem;
3237 int in_p;
3238 tree low, high;
3239 int lowequal = ((low0 == 0 && low1 == 0)
3240 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3241 low0, 0, low1, 0)));
3242 int highequal = ((high0 == 0 && high1 == 0)
3243 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3244 high0, 1, high1, 1)));
3245
3246 /* Make range 0 be the range that starts first, or ends last if they
3247      start at the same value.  Swap them if necessary.  */
3248 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3249 low0, 0, low1, 0))
3250 || (lowequal
3251 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3252 high1, 1, high0, 1))))
3253 {
3254 temp = in0_p, in0_p = in1_p, in1_p = temp;
3255 tem = low0, low0 = low1, low1 = tem;
3256 tem = high0, high0 = high1, high1 = tem;
3257 }
3258
3259 /* Now flag two cases, whether the ranges are disjoint or whether the
3260 second range is totally subsumed in the first. Note that the tests
3261 below are simplified by the ones above. */
3262 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3263 high0, 1, low1, 0));
3264 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3265 high1, 1, high0, 1));
3266
3267 /* We now have four cases, depending on whether we are including or
3268 excluding the two ranges. */
3269 if (in0_p && in1_p)
3270 {
3271 /* If they don't overlap, the result is false. If the second range
3272 is a subset it is the result. Otherwise, the range is from the start
3273 of the second to the end of the first. */
3274 if (no_overlap)
3275 in_p = 0, low = high = 0;
3276 else if (subset)
3277 in_p = 1, low = low1, high = high1;
3278 else
3279 in_p = 1, low = low1, high = high0;
3280 }
3281
3282 else if (in0_p && ! in1_p)
3283 {
3284 /* If they don't overlap, the result is the first range. If they are
3285 equal, the result is false. If the second range is a subset of the
3286 first, and the ranges begin at the same place, we go from just after
3287 the end of the first range to the end of the second. If the second
3288 range is not a subset of the first, or if it is a subset and both
3289 ranges end at the same place, the range starts at the start of the
3290 first range and ends just before the second range.
3291 Otherwise, we can't describe this as a single range. */
3292 if (no_overlap)
3293 in_p = 1, low = low0, high = high0;
3294 else if (lowequal && highequal)
3295 in_p = 0, low = high = 0;
3296 else if (subset && lowequal)
3297 {
3298 in_p = 1, high = high0;
3299 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3300 integer_one_node, 0);
3301 }
3302 else if (! subset || highequal)
3303 {
3304 in_p = 1, low = low0;
3305 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3306 integer_one_node, 0);
3307 }
3308 else
3309 return 0;
3310 }
3311
3312 else if (! in0_p && in1_p)
3313 {
3314 /* If they don't overlap, the result is the second range. If the second
3315 is a subset of the first, the result is false. Otherwise,
3316 the range starts just after the first range and ends at the
3317 end of the second. */
3318 if (no_overlap)
3319 in_p = 1, low = low1, high = high1;
3320 else if (subset || highequal)
3321 in_p = 0, low = high = 0;
3322 else
3323 {
3324 in_p = 1, high = high1;
3325 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3326 integer_one_node, 0);
3327 }
3328 }
3329
3330 else
3331 {
3332 /* The case where we are excluding both ranges. Here the complex case
3333 is if they don't overlap. In that case, the only time we have a
3334 range is if they are adjacent. If the second is a subset of the
3335 first, the result is the first. Otherwise, the range to exclude
3336 starts at the beginning of the first range and ends at the end of the
3337 second. */
3338 if (no_overlap)
3339 {
3340 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3341 range_binop (PLUS_EXPR, NULL_TREE,
3342 high0, 1,
3343 integer_one_node, 1),
3344 1, low1, 0)))
3345 in_p = 0, low = low0, high = high1;
3346 else
3347 return 0;
3348 }
3349 else if (subset)
3350 in_p = 0, low = low0, high = high0;
3351 else
3352 in_p = 0, low = low0, high = high1;
3353 }
3354
3355 *pin_p = in_p, *plow = low, *phigh = high;
3356 return 1;
3357 }
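
/* Illustrative worked example, not from the original sources: merging
   the two ranges that arise from "x >= 2 && x <= 5", namely "+ [2, -]"
   and "+ [-, 5]".  The swap above makes "+ [-, 5]" range 0 since it
   starts first; the ranges overlap and neither is a subset, so with
   both IN flags set the result runs from the start of the second to
   the end of the first: "+ [2, 5]".  */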
3358 \f
3359 #ifndef RANGE_TEST_NON_SHORT_CIRCUIT
3360 #define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
3361 #endif
3362
3363 /* EXP is some logical combination of boolean tests. See if we can
3364 merge it into some range test. Return the new tree if so. */
3365
3366 static tree
3367 fold_range_test (tree exp)
3368 {
3369 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3370 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3371 int in0_p, in1_p, in_p;
3372 tree low0, low1, low, high0, high1, high;
3373 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3374 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3375 tree tem;
3376
3377 /* If this is an OR operation, invert both sides; we will invert
3378 again at the end. */
3379 if (or_op)
3380 in0_p = ! in0_p, in1_p = ! in1_p;
3381
3382 /* If both expressions are the same, if we can merge the ranges, and we
3383 can build the range test, return it or it inverted. If one of the
3384 ranges is always true or always false, consider it to be the same
3385 expression as the other. */
3386 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3387 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3388 in1_p, low1, high1)
3389 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3390 lhs != 0 ? lhs
3391 : rhs != 0 ? rhs : integer_zero_node,
3392 in_p, low, high))))
3393 return or_op ? invert_truthvalue (tem) : tem;
3394
3395 /* On machines where the branch cost is expensive, if this is a
3396 short-circuited branch and the underlying object on both sides
3397 is the same, make a non-short-circuit operation. */
3398 else if (RANGE_TEST_NON_SHORT_CIRCUIT
3399 && lhs != 0 && rhs != 0
3400 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3401 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3402 && operand_equal_p (lhs, rhs, 0))
3403 {
3404 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3405 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
3406 which cases we can't do this. */
3407 if (simple_operand_p (lhs))
3408 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3409 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3410 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3411 TREE_OPERAND (exp, 1));
3412
3413 else if ((*lang_hooks.decls.global_bindings_p) () == 0
3414 && ! CONTAINS_PLACEHOLDER_P (lhs))
3415 {
3416 tree common = save_expr (lhs);
3417
3418 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3419 or_op ? ! in0_p : in0_p,
3420 low0, high0))
3421 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3422 or_op ? ! in1_p : in1_p,
3423 low1, high1))))
3424 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3425 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3426 TREE_TYPE (exp), lhs, rhs);
3427 }
3428 }
3429
3430 return 0;
3431 }
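
/* Illustrative worked example, not from the original sources: for
   EXP == (x == 2 || x == 3), make_range gives the single-value ranges
   "+ [2, 2]" and "+ [3, 3]".  The OR inversion above turns them into
   excluded ranges, which merge_ranges combines (they are adjacent)
   into "- [2, 3]"; building that check and inverting once more yields
   (unsigned) (x - 2) <= 1.  */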
3432 \f
3433 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3434 bit value. Arrange things so the extra bits will be set to zero if and
3435    only if C is sign-extended to its full width.  If MASK is nonzero,
3436 it is an INTEGER_CST that should be AND'ed with the extra bits. */
3437
3438 static tree
3439 unextend (tree c, int p, int unsignedp, tree mask)
3440 {
3441 tree type = TREE_TYPE (c);
3442 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3443 tree temp;
3444
3445 if (p == modesize || unsignedp)
3446 return c;
3447
3448 /* We work by getting just the sign bit into the low-order bit, then
3449 into the high-order bit, then sign-extend. We then XOR that value
3450 with C. */
3451 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3452 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3453
3454 /* We must use a signed type in order to get an arithmetic right shift.
3455 However, we must also avoid introducing accidental overflows, so that
3456 a subsequent call to integer_zerop will work. Hence we must
3457 do the type conversion here. At this point, the constant is either
3458 zero or one, and the conversion to a signed type can never overflow.
3459 We could get an overflow if this conversion is done anywhere else. */
3460 if (TREE_UNSIGNED (type))
3461 temp = convert ((*lang_hooks.types.signed_type) (type), temp);
3462
3463 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3464 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3465 if (mask != 0)
3466 temp = const_binop (BIT_AND_EXPR, temp, convert (TREE_TYPE (c), mask), 0);
3467 /* If necessary, convert the type back to match the type of C. */
3468 if (TREE_UNSIGNED (type))
3469 temp = convert (type, temp);
3470
3471 return convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
3472 }
3473 \f
3474 /* Find ways of folding logical expressions of LHS and RHS:
3475 Try to merge two comparisons to the same innermost item.
3476 Look for range tests like "ch >= '0' && ch <= '9'".
3477 Look for combinations of simple terms on machines with expensive branches
3478 and evaluate the RHS unconditionally.
3479
3480 For example, if we have p->a == 2 && p->b == 4 and we can make an
3481 object large enough to span both A and B, we can do this with a comparison
3482 against the object ANDed with the a mask.
3483
3484 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3485 operations to do this with one comparison.
3486
3487    We check for both normal comparisons and the BIT_AND_EXPRs made by this
3488    function and the one above.
3489
3490 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3491 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3492
3493 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
3494 two operands.
3495
3496 We return the simplified tree or 0 if no optimization is possible. */
3497
3498 static tree
3499 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
3500 {
3501 /* If this is the "or" of two comparisons, we can do something if
3502 the comparisons are NE_EXPR. If this is the "and", we can do something
3503 if the comparisons are EQ_EXPR. I.e.,
3504 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3505
3506 WANTED_CODE is this operation code. For single bit fields, we can
3507 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3508 comparison for one-bit fields. */
3509
3510 enum tree_code wanted_code;
3511 enum tree_code lcode, rcode;
3512 tree ll_arg, lr_arg, rl_arg, rr_arg;
3513 tree ll_inner, lr_inner, rl_inner, rr_inner;
3514 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3515 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3516 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3517 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3518 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3519 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3520 enum machine_mode lnmode, rnmode;
3521 tree ll_mask, lr_mask, rl_mask, rr_mask;
3522 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3523 tree l_const, r_const;
3524 tree lntype, rntype, result;
3525 int first_bit, end_bit;
3526 int volatilep;
3527
3528 /* Start by getting the comparison codes. Fail if anything is volatile.
3529 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3530 it were surrounded with a NE_EXPR. */
3531
3532 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3533 return 0;
3534
3535 lcode = TREE_CODE (lhs);
3536 rcode = TREE_CODE (rhs);
3537
3538 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3539 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3540
3541 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3542 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3543
3544 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3545 return 0;
3546
3547 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3548 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3549
3550 ll_arg = TREE_OPERAND (lhs, 0);
3551 lr_arg = TREE_OPERAND (lhs, 1);
3552 rl_arg = TREE_OPERAND (rhs, 0);
3553 rr_arg = TREE_OPERAND (rhs, 1);
3554
3555 /* Simplify (x<y) || (x==y) into (x<=y) and related optimizations. */
3556 if (simple_operand_p (ll_arg)
3557 && simple_operand_p (lr_arg)
3558 && !FLOAT_TYPE_P (TREE_TYPE (ll_arg)))
3559 {
3560 int compcode;
3561
3562 if (operand_equal_p (ll_arg, rl_arg, 0)
3563 && operand_equal_p (lr_arg, rr_arg, 0))
3564 {
3565 int lcompcode, rcompcode;
3566
3567 lcompcode = comparison_to_compcode (lcode);
3568 rcompcode = comparison_to_compcode (rcode);
3569 compcode = (code == TRUTH_AND_EXPR)
3570 ? lcompcode & rcompcode
3571 : lcompcode | rcompcode;
3572 }
3573 else if (operand_equal_p (ll_arg, rr_arg, 0)
3574 && operand_equal_p (lr_arg, rl_arg, 0))
3575 {
3576 int lcompcode, rcompcode;
3577
3578 rcode = swap_tree_comparison (rcode);
3579 lcompcode = comparison_to_compcode (lcode);
3580 rcompcode = comparison_to_compcode (rcode);
3581 compcode = (code == TRUTH_AND_EXPR)
3582 ? lcompcode & rcompcode
3583 : lcompcode | rcompcode;
3584 }
3585 else
3586 compcode = -1;
3587
3588 if (compcode == COMPCODE_TRUE)
3589 return convert (truth_type, integer_one_node);
3590 else if (compcode == COMPCODE_FALSE)
3591 return convert (truth_type, integer_zero_node);
3592 else if (compcode != -1)
3593 return build (compcode_to_comparison (compcode),
3594 truth_type, ll_arg, lr_arg);
3595 }
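
/* For reference: comparison_to_compcode encodes LT, EQ and GT as
   separate bits (COMPCODE_LT == 1, COMPCODE_EQ == 2, COMPCODE_GT == 4),
   so merging (x < y) || (x == y) above computes 1 | 2 == 3, which is
   COMPCODE_LE and is turned back into x <= y, while
   (x < y) && (x == y) computes 1 & 2 == 0 == COMPCODE_FALSE. */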
3596
3597 /* If the RHS can be evaluated unconditionally and its operands are
3598 simple, it wins to evaluate the RHS unconditionally on machines
3599 with expensive branches. In this case, this isn't a comparison
3600 that can be merged. Avoid doing this if the RHS is a floating-point
3601 comparison since those can trap. */
3602
3603 if (BRANCH_COST >= 2
3604 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
3605 && simple_operand_p (rl_arg)
3606 && simple_operand_p (rr_arg))
3607 {
3608 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
3609 if (code == TRUTH_OR_EXPR
3610 && lcode == NE_EXPR && integer_zerop (lr_arg)
3611 && rcode == NE_EXPR && integer_zerop (rr_arg)
3612 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3613 return build (NE_EXPR, truth_type,
3614 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3615 ll_arg, rl_arg),
3616 integer_zero_node);
3617
3618 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
3619 if (code == TRUTH_AND_EXPR
3620 && lcode == EQ_EXPR && integer_zerop (lr_arg)
3621 && rcode == EQ_EXPR && integer_zerop (rr_arg)
3622 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3623 return build (EQ_EXPR, truth_type,
3624 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3625 ll_arg, rl_arg),
3626 integer_zero_node);
3627
3628 return build (code, truth_type, lhs, rhs);
3629 }
3630
3631 /* See if the comparisons can be merged. Then get all the parameters for
3632 each side. */
3633
3634 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
3635 || (rcode != EQ_EXPR && rcode != NE_EXPR))
3636 return 0;
3637
3638 volatilep = 0;
3639 ll_inner = decode_field_reference (ll_arg,
3640 &ll_bitsize, &ll_bitpos, &ll_mode,
3641 &ll_unsignedp, &volatilep, &ll_mask,
3642 &ll_and_mask);
3643 lr_inner = decode_field_reference (lr_arg,
3644 &lr_bitsize, &lr_bitpos, &lr_mode,
3645 &lr_unsignedp, &volatilep, &lr_mask,
3646 &lr_and_mask);
3647 rl_inner = decode_field_reference (rl_arg,
3648 &rl_bitsize, &rl_bitpos, &rl_mode,
3649 &rl_unsignedp, &volatilep, &rl_mask,
3650 &rl_and_mask);
3651 rr_inner = decode_field_reference (rr_arg,
3652 &rr_bitsize, &rr_bitpos, &rr_mode,
3653 &rr_unsignedp, &volatilep, &rr_mask,
3654 &rr_and_mask);
3655
3656 /* The inner operation on the lhs of each comparison must be the same
3657 if we are to be able to do anything.
3658 Then see if we have constants. If not, the same must be true for
3659 the rhs's. */
3660 if (volatilep || ll_inner == 0 || rl_inner == 0
3661 || ! operand_equal_p (ll_inner, rl_inner, 0))
3662 return 0;
3663
3664 if (TREE_CODE (lr_arg) == INTEGER_CST
3665 && TREE_CODE (rr_arg) == INTEGER_CST)
3666 l_const = lr_arg, r_const = rr_arg;
3667 else if (lr_inner == 0 || rr_inner == 0
3668 || ! operand_equal_p (lr_inner, rr_inner, 0))
3669 return 0;
3670 else
3671 l_const = r_const = 0;
3672
3673 /* If either comparison code is not correct for our logical operation,
3674 fail. However, we can convert a one-bit comparison against zero into
3675 the opposite comparison against that bit being set in the field. */
3676
3677 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
3678 if (lcode != wanted_code)
3679 {
3680 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
3681 {
3682 /* Make the left operand unsigned, since we are only interested
3683 in the value of one bit. Otherwise we are doing the wrong
3684 thing below. */
3685 ll_unsignedp = 1;
3686 l_const = ll_mask;
3687 }
3688 else
3689 return 0;
3690 }
3691
3692 /* This is analogous to the code for l_const above. */
3693 if (rcode != wanted_code)
3694 {
3695 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
3696 {
3697 rl_unsignedp = 1;
3698 r_const = rl_mask;
3699 }
3700 else
3701 return 0;
3702 }
3703
3704 /* After this point all optimizations will generate bit-field
3705 references, which we might not want. */
3706 if (! (*lang_hooks.can_use_bit_fields_p) ())
3707 return 0;
3708
3709 /* See if we can find a mode that contains both fields being compared on
3710 the left. If we can't, fail. Otherwise, update all constants and masks
3711 to be relative to a field of that size. */
3712 first_bit = MIN (ll_bitpos, rl_bitpos);
3713 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
3714 lnmode = get_best_mode (end_bit - first_bit, first_bit,
3715 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
3716 volatilep);
3717 if (lnmode == VOIDmode)
3718 return 0;
3719
3720 lnbitsize = GET_MODE_BITSIZE (lnmode);
3721 lnbitpos = first_bit & ~ (lnbitsize - 1);
3722 lntype = (*lang_hooks.types.type_for_size) (lnbitsize, 1);
3723 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
3724
3725 if (BYTES_BIG_ENDIAN)
3726 {
3727 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
3728 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
3729 }
3730
3731 ll_mask = const_binop (LSHIFT_EXPR, convert (lntype, ll_mask),
3732 size_int (xll_bitpos), 0);
3733 rl_mask = const_binop (LSHIFT_EXPR, convert (lntype, rl_mask),
3734 size_int (xrl_bitpos), 0);
3735
3736 if (l_const)
3737 {
3738 l_const = convert (lntype, l_const);
3739 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
3740 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
3741 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
3742 fold (build1 (BIT_NOT_EXPR,
3743 lntype, ll_mask)),
3744 0)))
3745 {
3746 warning ("comparison is always %d", wanted_code == NE_EXPR);
3747
3748 return convert (truth_type,
3749 wanted_code == NE_EXPR
3750 ? integer_one_node : integer_zero_node);
3751 }
3752 }
3753 if (r_const)
3754 {
3755 r_const = convert (lntype, r_const);
3756 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
3757 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
3758 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
3759 fold (build1 (BIT_NOT_EXPR,
3760 lntype, rl_mask)),
3761 0)))
3762 {
3763 warning ("comparison is always %d", wanted_code == NE_EXPR);
3764
3765 return convert (truth_type,
3766 wanted_code == NE_EXPR
3767 ? integer_one_node : integer_zero_node);
3768 }
3769 }
3770
3771 /* If the right sides are not constant, do the same for them. Also,
3772 disallow this optimization if a size or signedness mismatch occurs
3773 between the left and right sides. */
3774 if (l_const == 0)
3775 {
3776 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
3777 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
3778 /* Make sure the two fields on the right
3779 correspond to the left without being swapped. */
3780 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
3781 return 0;
3782
3783 first_bit = MIN (lr_bitpos, rr_bitpos);
3784 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
3785 rnmode = get_best_mode (end_bit - first_bit, first_bit,
3786 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
3787 volatilep);
3788 if (rnmode == VOIDmode)
3789 return 0;
3790
3791 rnbitsize = GET_MODE_BITSIZE (rnmode);
3792 rnbitpos = first_bit & ~ (rnbitsize - 1);
3793 rntype = (*lang_hooks.types.type_for_size) (rnbitsize, 1);
3794 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
3795
3796 if (BYTES_BIG_ENDIAN)
3797 {
3798 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
3799 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
3800 }
3801
3802 lr_mask = const_binop (LSHIFT_EXPR, convert (rntype, lr_mask),
3803 size_int (xlr_bitpos), 0);
3804 rr_mask = const_binop (LSHIFT_EXPR, convert (rntype, rr_mask),
3805 size_int (xrr_bitpos), 0);
3806
3807 /* Make a mask that corresponds to both fields being compared.
3808 Do this for both items being compared. If the operands are the
3809 same size and the bits being compared are in the same position
3810 then we can do this by masking both and comparing the masked
3811 results. */
3812 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3813 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
3814 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
3815 {
3816 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
3817 ll_unsignedp || rl_unsignedp);
3818 if (! all_ones_mask_p (ll_mask, lnbitsize))
3819 lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
3820
3821 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
3822 lr_unsignedp || rr_unsignedp);
3823 if (! all_ones_mask_p (lr_mask, rnbitsize))
3824 rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
3825
3826 return build (wanted_code, truth_type, lhs, rhs);
3827 }
3828
3829 /* There is still another way we can do something: If both pairs of
3830 fields being compared are adjacent, we may be able to make a wider
3831 field containing them both.
3832
3833 Note that we still must mask the lhs/rhs expressions. Furthermore,
3834 the mask must be shifted to account for the shift done by
3835 make_bit_field_ref. */
3836 if ((ll_bitsize + ll_bitpos == rl_bitpos
3837 && lr_bitsize + lr_bitpos == rr_bitpos)
3838 || (ll_bitpos == rl_bitpos + rl_bitsize
3839 && lr_bitpos == rr_bitpos + rr_bitsize))
3840 {
3841 tree type;
3842
3843 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
3844 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
3845 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
3846 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
3847
3848 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
3849 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
3850 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
3851 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
3852
3853 /* Convert to the smaller type before masking out unwanted bits. */
3854 type = lntype;
3855 if (lntype != rntype)
3856 {
3857 if (lnbitsize > rnbitsize)
3858 {
3859 lhs = convert (rntype, lhs);
3860 ll_mask = convert (rntype, ll_mask);
3861 type = rntype;
3862 }
3863 else if (lnbitsize < rnbitsize)
3864 {
3865 rhs = convert (lntype, rhs);
3866 lr_mask = convert (lntype, lr_mask);
3867 type = lntype;
3868 }
3869 }
3870
3871 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
3872 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
3873
3874 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
3875 rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
3876
3877 return build (wanted_code, truth_type, lhs, rhs);
3878 }
3879
3880 return 0;
3881 }
3882
3883 /* Handle the case of comparisons with constants. If there is something in
3884 common between the masks, those bits of the constants must be the same.
3885 If not, the condition is always false. Test for this to avoid generating
3886 incorrect code below. */
3887 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
3888 if (! integer_zerop (result)
3889 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
3890 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
3891 {
3892 if (wanted_code == NE_EXPR)
3893 {
3894 warning ("`or' of unmatched not-equal tests is always 1");
3895 return convert (truth_type, integer_one_node);
3896 }
3897 else
3898 {
3899 warning ("`and' of mutually exclusive equal-tests is always 0");
3900 return convert (truth_type, integer_zero_node);
3901 }
3902 }
3903
3904 /* Construct the expression we will return. First get the component
3905 reference we will make. Unless the mask is all ones the width of
3906 that field, perform the mask operation. Then compare with the
3907 merged constant. */
3908 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
3909 ll_unsignedp || rl_unsignedp);
3910
3911 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3912 if (! all_ones_mask_p (ll_mask, lnbitsize))
3913 result = build (BIT_AND_EXPR, lntype, result, ll_mask);
3914
3915 return build (wanted_code, truth_type, result,
3916 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
3917 }
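
/* An illustrative sketch of the merging done above (the exact layout
   depends on the target's endianness and bit-field conventions): given

     struct S { unsigned int a : 4; unsigned int b : 4; } *p;

   the test p->a == 2 && p->b == 3 can become a single load of the byte
   containing both fields, masked if they do not fill it, compared
   against one merged constant, roughly

     (*(unsigned char *) p) == 0x32

   with the common little-endian layout placing A in the low nibble. */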
3918 \f
3919 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
3920 constant. */
3921
3922 static tree
3923 optimize_minmax_comparison (tree t)
3924 {
3925 tree type = TREE_TYPE (t);
3926 tree arg0 = TREE_OPERAND (t, 0);
3927 enum tree_code op_code;
3928 tree comp_const = TREE_OPERAND (t, 1);
3929 tree minmax_const;
3930 int consts_equal, consts_lt;
3931 tree inner;
3932
3933 STRIP_SIGN_NOPS (arg0);
3934
3935 op_code = TREE_CODE (arg0);
3936 minmax_const = TREE_OPERAND (arg0, 1);
3937 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
3938 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
3939 inner = TREE_OPERAND (arg0, 0);
3940
3941 /* If something does not permit us to optimize, return the original tree. */
3942 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
3943 || TREE_CODE (comp_const) != INTEGER_CST
3944 || TREE_CONSTANT_OVERFLOW (comp_const)
3945 || TREE_CODE (minmax_const) != INTEGER_CST
3946 || TREE_CONSTANT_OVERFLOW (minmax_const))
3947 return t;
3948
3949 /* Now handle all the various comparison codes. We only handle EQ_EXPR
3950 and GT_EXPR, doing the rest with recursive calls using logical
3951 simplifications. */
3952 switch (TREE_CODE (t))
3953 {
3954 case NE_EXPR: case LT_EXPR: case LE_EXPR:
3955 return
3956 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
3957
3958 case GE_EXPR:
3959 return
3960 fold (build (TRUTH_ORIF_EXPR, type,
3961 optimize_minmax_comparison
3962 (build (EQ_EXPR, type, arg0, comp_const)),
3963 optimize_minmax_comparison
3964 (build (GT_EXPR, type, arg0, comp_const))));
3965
3966 case EQ_EXPR:
3967 if (op_code == MAX_EXPR && consts_equal)
3968 /* MAX (X, 0) == 0 -> X <= 0 */
3969 return fold (build (LE_EXPR, type, inner, comp_const));
3970
3971 else if (op_code == MAX_EXPR && consts_lt)
3972 /* MAX (X, 0) == 5 -> X == 5 */
3973 return fold (build (EQ_EXPR, type, inner, comp_const));
3974
3975 else if (op_code == MAX_EXPR)
3976 /* MAX (X, 0) == -1 -> false */
3977 return omit_one_operand (type, integer_zero_node, inner);
3978
3979 else if (consts_equal)
3980 /* MIN (X, 0) == 0 -> X >= 0 */
3981 return fold (build (GE_EXPR, type, inner, comp_const));
3982
3983 else if (consts_lt)
3984 /* MIN (X, 0) == 5 -> false */
3985 return omit_one_operand (type, integer_zero_node, inner);
3986
3987 else
3988 /* MIN (X, 0) == -1 -> X == -1 */
3989 return fold (build (EQ_EXPR, type, inner, comp_const));
3990
3991 case GT_EXPR:
3992 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
3993 /* MAX (X, 0) > 0 -> X > 0
3994 MAX (X, 0) > 5 -> X > 5 */
3995 return fold (build (GT_EXPR, type, inner, comp_const));
3996
3997 else if (op_code == MAX_EXPR)
3998 /* MAX (X, 0) > -1 -> true */
3999 return omit_one_operand (type, integer_one_node, inner);
4000
4001 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4002 /* MIN (X, 0) > 0 -> false
4003 MIN (X, 0) > 5 -> false */
4004 return omit_one_operand (type, integer_zero_node, inner);
4005
4006 else
4007 /* MIN (X, 0) > -1 -> X > -1 */
4008 return fold (build (GT_EXPR, type, inner, comp_const));
4009
4010 default:
4011 return t;
4012 }
4013 }
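
/* For example, the GE_EXPR case above rewrites MAX (X, 0) >= 1 as
   (MAX (X, 0) == 1) || (MAX (X, 0) > 1); since consts_lt holds
   (0 < 1), the EQ_EXPR and GT_EXPR cases then yield
   (X == 1) || (X > 1), which fold may simplify further. */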
4014 \f
4015 /* T is an integer expression that is being multiplied or divided by, or
4016 reduced modulo, a constant C (CODE says which operation and what kind
4017 of divide or modulus). See if we can eliminate that operation by folding it with
4018 other operations already in T. WIDE_TYPE, if non-null, is a type that
4019 should be used for the computation if wider than our type.
4020
4021 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4022 (X * 2) + (Y * 4). We must, however, be assured that either the original
4023 expression would not overflow or that overflow is undefined for the type
4024 in the language in question.
4025
4026 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4027 the machine has a multiply-accumulate insn or that this is part of an
4028 addressing calculation.
4029
4030 If we return a non-null expression, it is an equivalent form of the
4031 original computation, but need not be in the original type. */
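
/* To make the first example concrete: dividing (X * 8) + (Y * 16) by 4
   enters the PLUS_EXPR case below, which recurses into both addends;
   each is a MULT_EXPR whose constant is a multiple of 4, so the
   constants cancel and the result is (X * 2) + (Y * 4), assuming signed
   arithmetic where overflow is undefined. */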
4032
4033 static tree
4034 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
4035 {
4036 /* To avoid exponential search depth, refuse to allow recursion past
4037 three levels. Beyond that (1) it's highly unlikely that we'll find
4038 something interesting and (2) we've probably processed it before
4039 when we built the inner expression. */
4040
4041 static int depth;
4042 tree ret;
4043
4044 if (depth > 3)
4045 return NULL;
4046
4047 depth++;
4048 ret = extract_muldiv_1 (t, c, code, wide_type);
4049 depth--;
4050
4051 return ret;
4052 }
4053
4054 static tree
4055 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
4056 {
4057 tree type = TREE_TYPE (t);
4058 enum tree_code tcode = TREE_CODE (t);
4059 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
4060 > GET_MODE_SIZE (TYPE_MODE (type)))
4061 ? wide_type : type);
4062 tree t1, t2;
4063 int same_p = tcode == code;
4064 tree op0 = NULL_TREE, op1 = NULL_TREE;
4065
4066 /* Don't deal with constants of zero here; they confuse the code below. */
4067 if (integer_zerop (c))
4068 return NULL_TREE;
4069
4070 if (TREE_CODE_CLASS (tcode) == '1')
4071 op0 = TREE_OPERAND (t, 0);
4072
4073 if (TREE_CODE_CLASS (tcode) == '2')
4074 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
4075
4076 /* Note that we need not handle conditional operations here since fold
4077 already handles those cases. So just do arithmetic here. */
4078 switch (tcode)
4079 {
4080 case INTEGER_CST:
4081 /* For a constant, we can always simplify if we are a multiply
4082 or (for divide and modulus) if it is a multiple of our constant. */
4083 if (code == MULT_EXPR
4084 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
4085 return const_binop (code, convert (ctype, t), convert (ctype, c), 0);
4086 break;
4087
4088 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
4089 /* If op0 is an expression ... */
4090 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
4091 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
4092 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
4093 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
4094 /* ... and is unsigned, and its type is smaller than ctype,
4095 then we cannot pass through this widening.
4096 && ((TREE_UNSIGNED (TREE_TYPE (op0))
4097 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4098 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
4099 && (GET_MODE_SIZE (TYPE_MODE (ctype))
4100 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
4101 /* ... or its type is larger than ctype,
4102 then we cannot pass through this truncation. */
4103 || (GET_MODE_SIZE (TYPE_MODE (ctype))
4104 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
4105 /* ... or signedness changes for division or modulus,
4106 then we cannot pass through this conversion. */
4107 || (code != MULT_EXPR
4108 && (TREE_UNSIGNED (ctype)
4109 != TREE_UNSIGNED (TREE_TYPE (op0))))))
4110 break;
4111
4112 /* Pass the constant down and see if we can make a simplification. If
4113 we can, replace this expression with the inner simplification for
4114 possible later conversion to our or some other type. */
4115 if ((t2 = convert (TREE_TYPE (op0), c)) != 0
4116 && TREE_CODE (t2) == INTEGER_CST
4117 && ! TREE_CONSTANT_OVERFLOW (t2)
4118 && (0 != (t1 = extract_muldiv (op0, t2, code,
4119 code == MULT_EXPR
4120 ? ctype : NULL_TREE))))
4121 return t1;
4122 break;
4123
4124 case NEGATE_EXPR: case ABS_EXPR:
4125 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4126 return fold (build1 (tcode, ctype, convert (ctype, t1)));
4127 break;
4128
4129 case MIN_EXPR: case MAX_EXPR:
4130 /* If widening the type changes the signedness, then we can't perform
4131 this optimization as that changes the result. */
4132 if (TREE_UNSIGNED (ctype) != TREE_UNSIGNED (type))
4133 break;
4134
4135 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
4136 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
4137 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
4138 {
4139 if (tree_int_cst_sgn (c) < 0)
4140 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
4141
4142 return fold (build (tcode, ctype, convert (ctype, t1),
4143 convert (ctype, t2)));
4144 }
4145 break;
4146
4147 case WITH_RECORD_EXPR:
4148 if ((t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code, wide_type)) != 0)
4149 return build (WITH_RECORD_EXPR, TREE_TYPE (t1), t1,
4150 TREE_OPERAND (t, 1));
4151 break;
4152
4153 case LSHIFT_EXPR: case RSHIFT_EXPR:
4154 /* If the second operand is constant, this is a multiplication
4155 or floor division, by a power of two, so we can treat it that
4156 way unless the multiplier or divisor overflows. */
4157 if (TREE_CODE (op1) == INTEGER_CST
4158 /* const_binop may not detect overflow correctly,
4159 so check for it explicitly here. */
4160 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
4161 && TREE_INT_CST_HIGH (op1) == 0
4162 && 0 != (t1 = convert (ctype,
4163 const_binop (LSHIFT_EXPR, size_one_node,
4164 op1, 0)))
4165 && ! TREE_OVERFLOW (t1))
4166 return extract_muldiv (build (tcode == LSHIFT_EXPR
4167 ? MULT_EXPR : FLOOR_DIV_EXPR,
4168 ctype, convert (ctype, op0), t1),
4169 c, code, wide_type);
4170 break;
4171
4172 case PLUS_EXPR: case MINUS_EXPR:
4173 /* See if we can eliminate the operation on both sides. If we can, we
4174 can return a new PLUS or MINUS. If we can't, the only remaining
4175 cases where we can do anything are if the second operand is a
4176 constant. */
4177 t1 = extract_muldiv (op0, c, code, wide_type);
4178 t2 = extract_muldiv (op1, c, code, wide_type);
4179 if (t1 != 0 && t2 != 0
4180 && (code == MULT_EXPR
4181 /* If not multiplication, we can only do this if both operands
4182 are divisible by c. */
4183 || (multiple_of_p (ctype, op0, c)
4184 && multiple_of_p (ctype, op1, c))))
4185 return fold (build (tcode, ctype, convert (ctype, t1),
4186 convert (ctype, t2)));
4187
4188 /* If this was a subtraction, negate OP1 and set it to be an addition.
4189 This simplifies the logic below. */
4190 if (tcode == MINUS_EXPR)
4191 tcode = PLUS_EXPR, op1 = negate_expr (op1);
4192
4193 if (TREE_CODE (op1) != INTEGER_CST)
4194 break;
4195
4196 /* If either OP1 or C are negative, this optimization is not safe for
4197 some of the division and remainder types while for others we need
4198 to change the code. */
4199 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
4200 {
4201 if (code == CEIL_DIV_EXPR)
4202 code = FLOOR_DIV_EXPR;
4203 else if (code == FLOOR_DIV_EXPR)
4204 code = CEIL_DIV_EXPR;
4205 else if (code != MULT_EXPR
4206 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
4207 break;
4208 }
4209
4210 /* If it's a multiply or a division/modulus operation of a multiple
4211 of our constant, do the operation and verify it doesn't overflow. */
4212 if (code == MULT_EXPR
4213 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4214 {
4215 op1 = const_binop (code, convert (ctype, op1), convert (ctype, c), 0);
4216 if (op1 == 0 || TREE_OVERFLOW (op1))
4217 break;
4218 }
4219 else
4220 break;
4221
4222 /* If we have an unsigned type that is not a sizetype, we cannot widen
4223 the operation since it will change the result if the original
4224 computation overflowed. */
4225 if (TREE_UNSIGNED (ctype)
4226 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
4227 && ctype != type)
4228 break;
4229
4230 /* If we were able to eliminate our operation from the first side,
4231 apply our operation to the second side and reform the PLUS. */
4232 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4233 return fold (build (tcode, ctype, convert (ctype, t1), op1));
4234
4235 /* The last case is if we are a multiply. In that case, we can
4236 apply the distributive law to commute the multiply and addition
4237 if the multiplication of the constants doesn't overflow. */
4238 if (code == MULT_EXPR)
4239 return fold (build (tcode, ctype, fold (build (code, ctype,
4240 convert (ctype, op0),
4241 convert (ctype, c))),
4242 op1));
4243
4244 break;
4245
4246 case MULT_EXPR:
4247 /* We have a special case here if we are doing something like
4248 (C * 8) % 4 since we know that's zero. */
4249 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4250 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4251 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4252 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4253 return omit_one_operand (type, integer_zero_node, op0);
4254
4255 /* ... fall through ... */
4256
4257 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
4258 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
4259 /* If we can extract our operation from the LHS, do so and return a
4260 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
4261 do something only if the second operand is a constant. */
4262 if (same_p
4263 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4264 return fold (build (tcode, ctype, convert (ctype, t1),
4265 convert (ctype, op1)));
4266 else if (tcode == MULT_EXPR && code == MULT_EXPR
4267 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4268 return fold (build (tcode, ctype, convert (ctype, op0),
4269 convert (ctype, t1)));
4270 else if (TREE_CODE (op1) != INTEGER_CST)
4271 return 0;
4272
4273 /* If these are the same operation types, we can associate them
4274 assuming no overflow. */
4275 if (tcode == code
4276 && 0 != (t1 = const_binop (MULT_EXPR, convert (ctype, op1),
4277 convert (ctype, c), 0))
4278 && ! TREE_OVERFLOW (t1))
4279 return fold (build (tcode, ctype, convert (ctype, op0), t1));
4280
4281 /* If these operations "cancel" each other, we have the main
4282 optimizations of this pass, which occur when either constant is a
4283 multiple of the other, in which case we replace this with either an
4284 operation of CODE or TCODE.
4285
4286 If we have an unsigned type that is not a sizetype, we cannot do
4287 this since it will change the result if the original computation
4288 overflowed. */
4289 if ((! TREE_UNSIGNED (ctype)
4290 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
4291 && ! flag_wrapv
4292 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4293 || (tcode == MULT_EXPR
4294 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4295 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
4296 {
4297 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4298 return fold (build (tcode, ctype, convert (ctype, op0),
4299 convert (ctype,
4300 const_binop (TRUNC_DIV_EXPR,
4301 op1, c, 0))));
4302 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4303 return fold (build (code, ctype, convert (ctype, op0),
4304 convert (ctype,
4305 const_binop (TRUNC_DIV_EXPR,
4306 c, op1, 0))));
4307 }
4308 break;
4309
4310 default:
4311 break;
4312 }
4313
4314 return 0;
4315 }
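
/* Similarly, the LSHIFT_EXPR case lets (X << 3) / 4 be handled as
   (X * 8) / 4: the shift is first rewritten as a multiplication by 8,
   and the recursive call then cancels the constants, giving X * 2. */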
4316 \f
4317 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4318 S, a SAVE_EXPR, return the expression actually being evaluated. Note
4319 that we may sometimes modify the tree. */
4320
4321 static tree
4322 strip_compound_expr (tree t, tree s)
4323 {
4324 enum tree_code code = TREE_CODE (t);
4325
4326 /* See if this is the COMPOUND_EXPR we want to eliminate. */
4327 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4328 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4329 return TREE_OPERAND (t, 1);
4330
4331 /* See if this is a COND_EXPR or a simple arithmetic operator. We
4332 don't bother handling any other types. */
4333 else if (code == COND_EXPR)
4334 {
4335 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4336 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4337 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
4338 }
4339 else if (TREE_CODE_CLASS (code) == '1')
4340 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4341 else if (TREE_CODE_CLASS (code) == '<'
4342 || TREE_CODE_CLASS (code) == '2')
4343 {
4344 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4345 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4346 }
4347
4348 return t;
4349 }
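
/* For example, if T is the COMPOUND_EXPR ((convert) S, E) built to
   force evaluation of the SAVE_EXPR S, the result is just E; otherwise
   the function recurses into T's operands looking for that pattern and
   returns T itself. */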
4350 \f
4351 /* Return a node which has the indicated constant VALUE (either 0 or
4352 1), and is of the indicated TYPE. */
4353
4354 static tree
4355 constant_boolean_node (int value, tree type)
4356 {
4357 if (type == integer_type_node)
4358 return value ? integer_one_node : integer_zero_node;
4359 else if (TREE_CODE (type) == BOOLEAN_TYPE)
4360 return (*lang_hooks.truthvalue_conversion) (value ? integer_one_node :
4361 integer_zero_node);
4362 else
4363 {
4364 tree t = build_int_2 (value, 0);
4365
4366 TREE_TYPE (t) = type;
4367 return t;
4368 }
4369 }
4370
4371 /* Utility function for the following routine, to see how complex a nesting of
4372 COND_EXPRs can be. EXPR is the expression and LIMIT is a count beyond which
4373 we don't care (to avoid spending too much time on complex expressions). */
4374
4375 static int
4376 count_cond (tree expr, int lim)
4377 {
4378 int ctrue, cfalse;
4379
4380 if (TREE_CODE (expr) != COND_EXPR)
4381 return 0;
4382 else if (lim <= 0)
4383 return 0;
4384
4385 ctrue = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4386 cfalse = count_cond (TREE_OPERAND (expr, 2), lim - 1 - ctrue);
4387 return MIN (lim, 1 + ctrue + cfalse);
4388 }
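
/* E.g. for a ? (b ? x : y) : z with LIM >= 2 this returns 2 (one for
   the outer COND_EXPR plus one for the inner); with LIM == 1 the
   result is capped at 1. */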
4389
4390 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
4391 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
4392 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
4393 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
4394 COND is the first argument to CODE; otherwise (as in the example
4395 given here), it is the second argument. TYPE is the type of the
4396 original expression. */
4397
4398 static tree
4399 fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
tree cond, tree arg, int cond_first_p)
4400 {
4401 tree test, true_value, false_value;
4402 tree lhs = NULL_TREE;
4403 tree rhs = NULL_TREE;
4404 /* In the end, we'll produce a COND_EXPR. Both arms of the
4405 conditional expression will be binary operations. The left-hand
4406 side of the expression to be executed if the condition is true
4407 will be pointed to by TRUE_LHS. Similarly, the right-hand side
4408 of the expression to be executed if the condition is true will be
4409 pointed to by TRUE_RHS. FALSE_LHS and FALSE_RHS are analogous --
4410 but apply to the expression to be executed if the conditional is
4411 false. */
4412 tree *true_lhs;
4413 tree *true_rhs;
4414 tree *false_lhs;
4415 tree *false_rhs;
4416 /* These are the codes to use for the left-hand side and right-hand
4417 side of the COND_EXPR. Normally, they are the same as CODE. */
4418 enum tree_code lhs_code = code;
4419 enum tree_code rhs_code = code;
4420 /* And these are the types of the expressions. */
4421 tree lhs_type = type;
4422 tree rhs_type = type;
4423 int save = 0;
4424
4425 if (cond_first_p)
4426 {
4427 true_rhs = false_rhs = &arg;
4428 true_lhs = &true_value;
4429 false_lhs = &false_value;
4430 }
4431 else
4432 {
4433 true_lhs = false_lhs = &arg;
4434 true_rhs = &true_value;
4435 false_rhs = &false_value;
4436 }
4437
4438 if (TREE_CODE (cond) == COND_EXPR)
4439 {
4440 test = TREE_OPERAND (cond, 0);
4441 true_value = TREE_OPERAND (cond, 1);
4442 false_value = TREE_OPERAND (cond, 2);
4443 /* If this operand throws an exception, then it does not make
4444 sense to try to perform a logical or arithmetic operation
4445 involving it. Instead of building `a + throw 3' for example,
4446 we simply build `a, throw 3'. */
4447 if (VOID_TYPE_P (TREE_TYPE (true_value)))
4448 {
4449 if (! cond_first_p)
4450 {
4451 lhs_code = COMPOUND_EXPR;
4452 lhs_type = void_type_node;
4453 }
4454 else
4455 lhs = true_value;
4456 }
4457 if (VOID_TYPE_P (TREE_TYPE (false_value)))
4458 {
4459 if (! cond_first_p)
4460 {
4461 rhs_code = COMPOUND_EXPR;
4462 rhs_type = void_type_node;
4463 }
4464 else
4465 rhs = false_value;
4466 }
4467 }
4468 else
4469 {
4470 tree testtype = TREE_TYPE (cond);
4471 test = cond;
4472 true_value = convert (testtype, integer_one_node);
4473 false_value = convert (testtype, integer_zero_node);
4474 }
4475
4476 /* If ARG is complex we want to make sure we only evaluate it once. Though
4477 this is only required if it is volatile, it might be more efficient even
4478 if it is not. However, if we succeed in folding one part to a constant,
4479 we do not need to make this SAVE_EXPR. Since we do this optimization
4480 primarily to see if we do end up with a constant and this SAVE_EXPR
4481 interferes with later optimizations, suppressing it when we can is
4482 important.
4483
4484 If we are not in a function, we can't make a SAVE_EXPR, so don't try to
4485 do so. Don't try to see if the result is a constant if an arm is a
4486 COND_EXPR since we get exponential behavior in that case. */
4487
4488 if (saved_expr_p (arg))
4489 save = 1;
4490 else if (lhs == 0 && rhs == 0
4491 && !TREE_CONSTANT (arg)
4492 && (*lang_hooks.decls.global_bindings_p) () == 0
4493 && ((TREE_CODE (arg) != VAR_DECL && TREE_CODE (arg) != PARM_DECL)
4494 || TREE_SIDE_EFFECTS (arg)))
4495 {
4496 if (TREE_CODE (true_value) != COND_EXPR)
4497 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4498
4499 if (TREE_CODE (false_value) != COND_EXPR)
4500 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4501
4502 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4503 && (rhs == 0 || !TREE_CONSTANT (rhs)))
4504 {
4505 arg = save_expr (arg);
4506 lhs = rhs = 0;
4507 save = 1;
4508 }
4509 }
4510
4511 if (lhs == 0)
4512 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4513 if (rhs == 0)
4514 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4515
4516 test = fold (build (COND_EXPR, type, test, lhs, rhs));
4517
4518 if (save)
4519 return build (COMPOUND_EXPR, type,
4520 convert (void_type_node, arg),
4521 strip_compound_expr (test, arg));
4522 else
4523 return convert (type, test);
4524 }
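
/* So, for instance, a + (b ? x : y) with a simple A becomes
   b ? (a + x) : (a + y), while an A that must not be evaluated twice is
   first wrapped in a SAVE_EXPR and the result takes the form
   ((void) a, b ? (a + x) : (a + y)) via the COMPOUND_EXPR above. */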
4525
4526 \f
4527 /* Subroutine of fold() that checks for the addition of +/- 0.0.
4528
4529 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
4530 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
4531 ADDEND is the same as X.
4532
4533 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
4534 and finite. The problematic cases are when X is zero, and its mode
4535 has signed zeros. In the case of rounding towards -infinity,
4536 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
4537 modes, X + 0 is not the same as X because -0 + 0 is 0. */
4538
4539 static bool
4540 fold_real_zero_addition_p (tree type, tree addend, int negate)
4541 {
4542 if (!real_zerop (addend))
4543 return false;
4544
4545 /* Don't allow the fold with -fsignaling-nans. */
4546 if (HONOR_SNANS (TYPE_MODE (type)))
4547 return false;
4548
4549 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
4550 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
4551 return true;
4552
4553 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
4554 if (TREE_CODE (addend) == REAL_CST
4555 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
4556 negate = !negate;
4557
4558 /* The mode has signed zeros, and we have to honor their sign.
4559 In this situation, there is only one case we can return true for.
4560 X - 0 is the same as X unless rounding towards -infinity is
4561 supported. */
4562 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
4563 }
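
/* Concretely, under IEEE semantics where signed zeros are honored and
   rounding is not sign-dependent: x - 0.0 and x + -0.0 fold to x,
   while x + 0.0 and x - -0.0 do not (they would turn a -0.0 result
   into 0.0); when signed zeros need not be honored (e.g. with
   -funsafe-math-optimizations) every +/-0.0 addend folds. */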
4564
4565 /* Subroutine of fold() that checks comparisons of built-in math
4566 functions against real constants.
4567
4568 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
4569 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
4570 is the type of the result and ARG0 and ARG1 are the operands of the
4571 comparison. ARG1 must be a TREE_REAL_CST.
4572
4573 The function returns the constant folded tree if a simplification
4574 can be made, and NULL_TREE otherwise. */
4575
4576 static tree
4577 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
tree type, tree arg0, tree arg1)
4578 {
4579 REAL_VALUE_TYPE c;
4580
4581 if (fcode == BUILT_IN_SQRT
4582 || fcode == BUILT_IN_SQRTF
4583 || fcode == BUILT_IN_SQRTL)
4584 {
4585 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
4586 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
4587
4588 c = TREE_REAL_CST (arg1);
4589 if (REAL_VALUE_NEGATIVE (c))
4590 {
4591 /* sqrt(x) < y is always false, if y is negative. */
4592 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
4593 return omit_one_operand (type,
4594 convert (type, integer_zero_node),
4595 arg);
4596
4597 /* sqrt(x) > y is always true, if y is negative and we
4598 don't care about NaNs, i.e. negative values of x. */
4599 if (code == NE_EXPR || !HONOR_NANS (mode))
4600 return omit_one_operand (type,
4601 convert (type, integer_one_node),
4602 arg);
4603
4604 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
4605 return fold (build (GE_EXPR, type, arg,
4606 build_real (TREE_TYPE (arg), dconst0)));
4607 }
4608 else if (code == GT_EXPR || code == GE_EXPR)
4609 {
4610 REAL_VALUE_TYPE c2;
4611
4612 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
4613 real_convert (&c2, mode, &c2);
4614
4615 if (REAL_VALUE_ISINF (c2))
4616 {
4617 /* sqrt(x) > y is x == +Inf, when y is very large. */
4618 if (HONOR_INFINITIES (mode))
4619 return fold (build (EQ_EXPR, type, arg,
4620 build_real (TREE_TYPE (arg), c2)));
4621
4622 /* sqrt(x) > y is always false, when y is very large
4623 and we don't care about infinities. */
4624 return omit_one_operand (type,
4625 convert (type, integer_zero_node),
4626 arg);
4627 }
4628
4629 /* sqrt(x) > c is the same as x > c*c. */
4630 return fold (build (code, type, arg,
4631 build_real (TREE_TYPE (arg), c2)));
4632 }
4633 else if (code == LT_EXPR || code == LE_EXPR)
4634 {
4635 REAL_VALUE_TYPE c2;
4636
4637 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
4638 real_convert (&c2, mode, &c2);
4639
4640 if (REAL_VALUE_ISINF (c2))
4641 {
4642 /* sqrt(x) < y is always true, when y is a very large
4643 value and we don't care about NaNs or Infinities. */
4644 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
4645 return omit_one_operand (type,
4646 convert (type, integer_one_node),
4647 arg);
4648
4649 /* sqrt(x) < y is x != +Inf when y is very large and we
4650 don't care about NaNs. */
4651 if (! HONOR_NANS (mode))
4652 return fold (build (NE_EXPR, type, arg,
4653 build_real (TREE_TYPE (arg), c2)));
4654
4655 /* sqrt(x) < y is x >= 0 when y is very large and we
4656 don't care about Infinities. */
4657 if (! HONOR_INFINITIES (mode))
4658 return fold (build (GE_EXPR, type, arg,
4659 build_real (TREE_TYPE (arg), dconst0)));
4660
4661 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
4662 if ((*lang_hooks.decls.global_bindings_p) () != 0
4663 || CONTAINS_PLACEHOLDER_P (arg))
4664 return NULL_TREE;
4665
4666 arg = save_expr (arg);
4667 return fold (build (TRUTH_ANDIF_EXPR, type,
4668 fold (build (GE_EXPR, type, arg,
4669 build_real (TREE_TYPE (arg),
4670 dconst0))),
4671 fold (build (NE_EXPR, type, arg,
4672 build_real (TREE_TYPE (arg),
4673 c2)))));
4674 }
4675
4676 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
4677 if (! HONOR_NANS (mode))
4678 return fold (build (code, type, arg,
4679 build_real (TREE_TYPE (arg), c2)));
4680
4681 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
4682 if ((*lang_hooks.decls.global_bindings_p) () == 0
4683 && ! CONTAINS_PLACEHOLDER_P (arg))
4684 {
4685 arg = save_expr (arg);
4686 return fold (build (TRUTH_ANDIF_EXPR, type,
4687 fold (build (GE_EXPR, type, arg,
4688 build_real (TREE_TYPE (arg),
4689 dconst0))),
4690 fold (build (code, type, arg,
4691 build_real (TREE_TYPE (arg),
4692 c2)))));
4693 }
4694 }
4695 }
4696
4697 return NULL_TREE;
4698 }
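
/* Examples of the sqrt folds above, for a double argument:
   sqrt (x) > 2.0 and sqrt (x) >= 2.0 become x > 4.0 and x >= 4.0;
   sqrt (x) < 2.0 becomes x >= 0.0 && x < 4.0, or just x < 4.0 when
   NaNs need not be honored; and comparisons against a negative
   constant reduce to a sign test of X or to a constant. */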
4699
4700 /* Subroutine of fold() that optimizes comparisons against Infinities,
4701 either +Inf or -Inf.
4702
4703 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
4704 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
4705 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
4706
4707 The function returns the constant folded tree if a simplification
4708 can be made, and NULL_TREE otherwise. */
4709
4710 static tree
4711 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
4712 {
4713 enum machine_mode mode;
4714 REAL_VALUE_TYPE max;
4715 tree temp;
4716 bool neg;
4717
4718 mode = TYPE_MODE (TREE_TYPE (arg0));
4719
4720 /* For negative infinity swap the sense of the comparison. */
4721 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
4722 if (neg)
4723 code = swap_tree_comparison (code);
4724
4725 switch (code)
4726 {
4727 case GT_EXPR:
4728 /* x > +Inf is always false, if we ignore sNaNs. */
4729 if (HONOR_SNANS (mode))
4730 return NULL_TREE;
4731 return omit_one_operand (type,
4732 convert (type, integer_zero_node),
4733 arg0);
4734
4735 case LE_EXPR:
4736 /* x <= +Inf is always true, if we don't care about NaNs. */
4737 if (! HONOR_NANS (mode))
4738 return omit_one_operand (type,
4739 convert (type, integer_one_node),
4740 arg0);
4741
4742 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
4743 if ((*lang_hooks.decls.global_bindings_p) () == 0
4744 && ! CONTAINS_PLACEHOLDER_P (arg0))
4745 {
4746 arg0 = save_expr (arg0);
4747 return fold (build (EQ_EXPR, type, arg0, arg0));
4748 }
4749 break;
4750
4751 case EQ_EXPR:
4752 case GE_EXPR:
4753 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
4754 real_maxval (&max, neg, mode);
4755 return fold (build (neg ? LT_EXPR : GT_EXPR, type,
4756 arg0, build_real (TREE_TYPE (arg0), max)));
4757
4758 case LT_EXPR:
4759 /* x < +Inf is always equal to x <= DBL_MAX. */
4760 real_maxval (&max, neg, mode);
4761 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
4762 arg0, build_real (TREE_TYPE (arg0), max)));
4763
4764 case NE_EXPR:
4765 /* x != +Inf is always equal to !(x > DBL_MAX). */
4766 real_maxval (&max, neg, mode);
4767 if (! HONOR_NANS (mode))
4768 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
4769 arg0, build_real (TREE_TYPE (arg0), max)));
4770 temp = fold (build (neg ? LT_EXPR : GT_EXPR, type,
4771 arg0, build_real (TREE_TYPE (arg0), max)));
4772 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
4773
4774 default:
4775 break;
4776 }
4777
4778 return NULL_TREE;
4779 }
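
/* For example: x < +Inf becomes x <= DBL_MAX, x >= +Inf becomes
   x > DBL_MAX, and x != +Inf becomes !(x > DBL_MAX) when NaNs are
   honored (x <= DBL_MAX when they are not); for -Inf the comparison is
   first mirrored via swap_tree_comparison, so x > -Inf becomes
   x >= -DBL_MAX. */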
4780
4781 /* If CODE with arguments ARG0 and ARG1 represents a single bit
4782 equality/inequality test, then return a simplified form of
4783 the test using shifts and logical operations. Otherwise return
4784 NULL. TYPE is the desired result type. */
4785
4786 tree
4787 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
4788 tree result_type)
4789 {
4790 /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
4791 operand 0. */
4792 if (code == TRUTH_NOT_EXPR)
4793 {
4794 code = TREE_CODE (arg0);
4795 if (code != NE_EXPR && code != EQ_EXPR)
4796 return NULL_TREE;
4797
4798 /* Extract the arguments of the EQ/NE. */
4799 arg1 = TREE_OPERAND (arg0, 1);
4800 arg0 = TREE_OPERAND (arg0, 0);
4801
4802 /* This requires us to invert the code. */
4803 code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
4804 }
4805
4806 /* If this is testing a single bit, we can optimize the test. */
4807 if ((code == NE_EXPR || code == EQ_EXPR)
4808 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
4809 && integer_pow2p (TREE_OPERAND (arg0, 1)))
4810 {
4811 tree inner = TREE_OPERAND (arg0, 0);
4812 tree type = TREE_TYPE (arg0);
4813 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
4814 enum machine_mode operand_mode = TYPE_MODE (type);
4815 int ops_unsigned;
4816 tree signed_type, unsigned_type;
4817 tree arg00;
4818
4819 /* If we have (A & C) != 0 where C is the sign bit of A, convert
4820 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
4821 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
4822 if (arg00 != NULL_TREE)
4823 {
4824 tree stype = (*lang_hooks.types.signed_type) (TREE_TYPE (arg00));
4825 return fold (build (code == EQ_EXPR ? GE_EXPR : LT_EXPR, result_type,
4826 convert (stype, arg00),
4827 convert (stype, integer_zero_node)));
4828 }
4829
4830 /* Otherwise we have (A & C) != 0 where C is a single bit,
4831 convert that into ((A >> C2) & 1). Where C2 = log2(C).
4832 Similarly for (A & C) == 0. */
4833
4834 /* If INNER is a right shift of a constant and it plus BITNUM does
4835 not overflow, adjust BITNUM and INNER. */
4836 if (TREE_CODE (inner) == RSHIFT_EXPR
4837 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
4838 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
4839 && bitnum < TYPE_PRECISION (type)
4840 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
4841 bitnum - TYPE_PRECISION (type)))
4842 {
4843 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
4844 inner = TREE_OPERAND (inner, 0);
4845 }
4846
4847 /* If we are going to be able to omit the AND below, we must do our
4848 operations as unsigned. If we must use the AND, we have a choice.
4849 Normally unsigned is faster, but for some machines signed is. */
4850 ops_unsigned = (bitnum == TYPE_PRECISION (type) - 1 ? 1
4851 #ifdef LOAD_EXTEND_OP
4852 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
4853 #else
4854 : 1
4855 #endif
4856 );
4857
4858 signed_type = (*lang_hooks.types.type_for_mode) (operand_mode, 0);
4859 unsigned_type = (*lang_hooks.types.type_for_mode) (operand_mode, 1);
4860
4861 if (bitnum != 0)
4862 inner = build (RSHIFT_EXPR, ops_unsigned ? unsigned_type : signed_type,
4863 inner, size_int (bitnum));
4864
4865 if (code == EQ_EXPR)
4866 inner = build (BIT_XOR_EXPR, ops_unsigned ? unsigned_type : signed_type,
4867 inner, integer_one_node);
4868
4869 /* Put the AND last so it can combine with more things. */
4870 if (bitnum != TYPE_PRECISION (type) - 1)
4871 inner = build (BIT_AND_EXPR, ops_unsigned ? unsigned_type : signed_type,
4872 inner, integer_one_node);
4873
4874 /* Make sure to return the proper type. */
4875 if (TREE_TYPE (inner) != result_type)
4876 inner = convert (result_type, inner);
4877
4878 return inner;
4879 }
4880 return NULL_TREE;
4881 }
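
/* E.g. (x & 8) != 0 folds to (x >> 3) & 1, (x & 8) == 0 folds to
   ((x >> 3) ^ 1) & 1, and a test of the sign bit itself, such as
   (x & 0x80) != 0 for an 8-bit X, folds to (signed) x < 0 via the
   sign_bit_p shortcut above. */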
4882
4883 /* Perform constant folding and related simplification of EXPR.
4884 The related simplifications include x*1 => x, x*0 => 0, etc.,
4885 and application of the associative law.
4886 NOP_EXPR conversions may be removed freely (as long as we
4887 are careful not to change the C type of the overall expression)
4888 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
4889 but we can constant-fold them if they have constant operands. */
4890
4891 tree
4892 fold (tree expr)
4893 {
4894 tree t = expr;
4895 tree t1 = NULL_TREE;
4896 tree tem;
4897 tree type = TREE_TYPE (expr);
4898 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4899 enum tree_code code = TREE_CODE (t);
4900 int kind = TREE_CODE_CLASS (code);
4901 int invert;
4902 /* WINS will be nonzero when the switch is done
4903 if all operands are constant. */
4904 int wins = 1;
4905
4906 /* Don't try to process an RTL_EXPR since its operands aren't trees.
4907 Likewise for a SAVE_EXPR that's already been evaluated. */
4908 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
4909 return t;
4910
4911 /* Return right away if a constant. */
4912 if (kind == 'c')
4913 return t;
4914
4915 #ifdef MAX_INTEGER_COMPUTATION_MODE
4916 check_max_integer_computation_mode (expr);
4917 #endif
4918
4919 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
4920 {
4921 tree subop;
4922
4923 /* Special case for conversion ops that can have fixed point args. */
4924 arg0 = TREE_OPERAND (t, 0);
4925
4926 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
4927 if (arg0 != 0)
4928 STRIP_SIGN_NOPS (arg0);
4929
4930 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
4931 subop = TREE_REALPART (arg0);
4932 else
4933 subop = arg0;
4934
4935 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
4936 && TREE_CODE (subop) != REAL_CST)
4938 /* Note that TREE_CONSTANT isn't enough:
4939 static var addresses are constant but we can't
4940 do arithmetic on them. */
4941 wins = 0;
4942 }
4943 else if (IS_EXPR_CODE_CLASS (kind) || kind == 'r')
4944 {
4945 int len = first_rtl_op (code);
4946 int i;
4947 for (i = 0; i < len; i++)
4948 {
4949 tree op = TREE_OPERAND (t, i);
4950 tree subop;
4951
4952 if (op == 0)
4953 continue; /* Valid for CALL_EXPR, at least. */
4954
4955 if (kind == '<' || code == RSHIFT_EXPR)
4956 {
4957 /* Signedness matters here. Perhaps we can refine this
4958 later. */
4959 STRIP_SIGN_NOPS (op);
4960 }
4961 else
4962 /* Strip any conversions that don't change the mode. */
4963 STRIP_NOPS (op);
4964
4965 if (TREE_CODE (op) == COMPLEX_CST)
4966 subop = TREE_REALPART (op);
4967 else
4968 subop = op;
4969
4970 if (TREE_CODE (subop) != INTEGER_CST
4971 && TREE_CODE (subop) != REAL_CST)
4972 /* Note that TREE_CONSTANT isn't enough:
4973 static var addresses are constant but we can't
4974 do arithmetic on them. */
4975 wins = 0;
4976
4977 if (i == 0)
4978 arg0 = op;
4979 else if (i == 1)
4980 arg1 = op;
4981 }
4982 }
4983
4984 /* If this is a commutative operation, and ARG0 is a constant, move it
4985 to ARG1 to reduce the number of tests below. */
4986 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
4987 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
4988 || code == BIT_AND_EXPR)
4989 && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
4990 {
4991 tem = arg0; arg0 = arg1; arg1 = tem;
4992
4993 tem = TREE_OPERAND (t, 0); TREE_OPERAND (t, 0) = TREE_OPERAND (t, 1);
4994 TREE_OPERAND (t, 1) = tem;
4995 }
4996
4997 /* Now WINS is set as described above,
4998 ARG0 is the first operand of EXPR,
4999 and ARG1 is the second operand (if it has more than one operand).
5000
5001 First check for cases where an arithmetic operation is applied to a
5002 compound, conditional, or comparison operation. Push the arithmetic
5003 operation inside the compound or conditional to see if any folding
5004 can then be done. Convert comparison to conditional for this purpose.
5005 This also optimizes non-constant cases that used to be done in
5006 expand_expr.
5007
5008 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
5009 one of the operands is a truth value and the other is a truth value or a
5010 BIT_AND_EXPR with the constant 1. In that case, the
5011 code below would make the expression more complex. Change it to a
5012 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
5013 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
5014
5015 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
5016 || code == EQ_EXPR || code == NE_EXPR)
5017 && ((truth_value_p (TREE_CODE (arg0))
5018 && (truth_value_p (TREE_CODE (arg1))
5019 || (TREE_CODE (arg1) == BIT_AND_EXPR
5020 && integer_onep (TREE_OPERAND (arg1, 1)))))
5021 || (truth_value_p (TREE_CODE (arg1))
5022 && (truth_value_p (TREE_CODE (arg0))
5023 || (TREE_CODE (arg0) == BIT_AND_EXPR
5024 && integer_onep (TREE_OPERAND (arg0, 1)))))))
5025 {
5026 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
5027 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
5028 : TRUTH_XOR_EXPR,
5029 type, arg0, arg1));
5030
5031 if (code == EQ_EXPR)
5032 t = invert_truthvalue (t);
5033
5034 return t;
5035 }
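
/* Thus (a < b) & (c < d) becomes (a < b) && (c < d), | of two truth
   values becomes ||, != of two truth values becomes a TRUTH_XOR_EXPR,
   and == becomes the inverted TRUTH_XOR_EXPR. */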
5036
5037 if (TREE_CODE_CLASS (code) == '1')
5038 {
5039 if (TREE_CODE (arg0) == COMPOUND_EXPR)
5040 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5041 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
5042 else if (TREE_CODE (arg0) == COND_EXPR)
5043 {
5044 tree arg01 = TREE_OPERAND (arg0, 1);
5045 tree arg02 = TREE_OPERAND (arg0, 2);
5046 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
5047 arg01 = fold (build1 (code, type, arg01));
5048 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
5049 arg02 = fold (build1 (code, type, arg02));
5050 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
5051 arg01, arg02));
5052
5053 /* If this was a conversion, and all we did was to move it
5054 inside the COND_EXPR, bring it back out. But leave it if
5055 it is a conversion from integer to integer and the
5056 result precision is no wider than a word since such a
5057 conversion is cheap and may be optimized away by combine,
5058 while it couldn't if it were outside the COND_EXPR. Then return
5059 so we don't get into an infinite recursion loop taking the
5060 conversion out and then back in. */
5061
5062 if ((code == NOP_EXPR || code == CONVERT_EXPR
5063 || code == NON_LVALUE_EXPR)
5064 && TREE_CODE (t) == COND_EXPR
5065 && TREE_CODE (TREE_OPERAND (t, 1)) == code
5066 && TREE_CODE (TREE_OPERAND (t, 2)) == code
5067 && ! VOID_TYPE_P (TREE_OPERAND (t, 1))
5068 && ! VOID_TYPE_P (TREE_OPERAND (t, 2))
5069 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
5070 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
5071 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
5072 && (INTEGRAL_TYPE_P
5073 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))))
5074 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
5075 t = build1 (code, type,
5076 build (COND_EXPR,
5077 TREE_TYPE (TREE_OPERAND
5078 (TREE_OPERAND (t, 1), 0)),
5079 TREE_OPERAND (t, 0),
5080 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
5081 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
5082 return t;
5083 }
5084 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
5085 return fold (build (COND_EXPR, type, arg0,
5086 fold (build1 (code, type, integer_one_node)),
5087 fold (build1 (code, type, integer_zero_node))));
5088 }
5089 else if (TREE_CODE_CLASS (code) == '<'
5090 && TREE_CODE (arg0) == COMPOUND_EXPR)
5091 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5092 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5093 else if (TREE_CODE_CLASS (code) == '<'
5094 && TREE_CODE (arg1) == COMPOUND_EXPR)
5095 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5096 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
5097 else if (TREE_CODE_CLASS (code) == '2'
5098 || TREE_CODE_CLASS (code) == '<')
5099 {
5100 if (TREE_CODE (arg1) == COMPOUND_EXPR
5101 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg1, 0))
5102 && ! TREE_SIDE_EFFECTS (arg0))
5103 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5104 fold (build (code, type,
5105 arg0, TREE_OPERAND (arg1, 1))));
5106 else if ((TREE_CODE (arg1) == COND_EXPR
5107 || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
5108 && TREE_CODE_CLASS (code) != '<'))
5109 && (TREE_CODE (arg0) != COND_EXPR
5110 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5111 && (! TREE_SIDE_EFFECTS (arg0)
5112 || ((*lang_hooks.decls.global_bindings_p) () == 0
5113 && ! CONTAINS_PLACEHOLDER_P (arg0))))
5114 return
5115 fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
5116 /*cond_first_p=*/0);
5117 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
5118 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5119 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5120 else if ((TREE_CODE (arg0) == COND_EXPR
5121 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
5122 && TREE_CODE_CLASS (code) != '<'))
5123 && (TREE_CODE (arg1) != COND_EXPR
5124 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5125 && (! TREE_SIDE_EFFECTS (arg1)
5126 || ((*lang_hooks.decls.global_bindings_p) () == 0
5127 && ! CONTAINS_PLACEHOLDER_P (arg1))))
5128 return
5129 fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
5130 /*cond_first_p=*/1);
5131 }
5132
5133 switch (code)
5134 {
5135 case INTEGER_CST:
5136 case REAL_CST:
5137 case VECTOR_CST:
5138 case STRING_CST:
5139 case COMPLEX_CST:
5140 case CONSTRUCTOR:
5141 return t;
5142
5143 case CONST_DECL:
5144 return fold (DECL_INITIAL (t));
5145
5146 case NOP_EXPR:
5147 case FLOAT_EXPR:
5148 case CONVERT_EXPR:
5149 case FIX_TRUNC_EXPR:
5150 /* Other kinds of FIX are not handled properly by fold_convert. */
5151
5152 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
5153 return TREE_OPERAND (t, 0);
5154
5155 /* Handle cases of two conversions in a row. */
5156 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
5157 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
5158 {
5159 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5160 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
5161 tree final_type = TREE_TYPE (t);
5162 int inside_int = INTEGRAL_TYPE_P (inside_type);
5163 int inside_ptr = POINTER_TYPE_P (inside_type);
5164 int inside_float = FLOAT_TYPE_P (inside_type);
5165 unsigned int inside_prec = TYPE_PRECISION (inside_type);
5166 int inside_unsignedp = TREE_UNSIGNED (inside_type);
5167 int inter_int = INTEGRAL_TYPE_P (inter_type);
5168 int inter_ptr = POINTER_TYPE_P (inter_type);
5169 int inter_float = FLOAT_TYPE_P (inter_type);
5170 unsigned int inter_prec = TYPE_PRECISION (inter_type);
5171 int inter_unsignedp = TREE_UNSIGNED (inter_type);
5172 int final_int = INTEGRAL_TYPE_P (final_type);
5173 int final_ptr = POINTER_TYPE_P (final_type);
5174 int final_float = FLOAT_TYPE_P (final_type);
5175 unsigned int final_prec = TYPE_PRECISION (final_type);
5176 int final_unsignedp = TREE_UNSIGNED (final_type);
5177
5178 /* In addition to the cases of two conversions in a row
5179 handled below, if we are converting something to its own
5180 type via an object of identical or wider precision, neither
5181 conversion is needed. */
5182 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (final_type)
5183 && ((inter_int && final_int) || (inter_float && final_float))
5184 && inter_prec >= final_prec)
5185 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5186
5187 /* Likewise, if the intermediate and final types are either both
5188 float or both integer, we don't need the middle conversion if
5189 it is wider than the final type and doesn't change the signedness
5190 (for integers). Avoid this if the final type is a pointer
5191 since then we sometimes need the inner conversion. Likewise if
5192 the outer has a precision not equal to the size of its mode. */
5193 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
5194 || (inter_float && inside_float))
5195 && inter_prec >= inside_prec
5196 && (inter_float || inter_unsignedp == inside_unsignedp)
5197 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5198 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5199 && ! final_ptr)
5200 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5201
5202 /* If we have a sign-extension of a zero-extended value, we can
5203 replace that by a single zero-extension. */
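	      /* For example (assuming the usual 8/32/64-bit widths),
		 (long) (int) c with c unsigned char: the int value always
		 has a clear sign bit, so sign-extending it to long equals
		 zero-extending c directly.  */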
5204 if (inside_int && inter_int && final_int
5205 && inside_prec < inter_prec && inter_prec < final_prec
5206 && inside_unsignedp && !inter_unsignedp)
5207 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5208
5209 /* Two conversions in a row are not needed unless:
5210 - some conversion is floating-point (overstrict for now), or
5211 - the intermediate type is narrower than both initial and
5212 final, or
5213 - the intermediate type and innermost type differ in signedness,
5214 and the outermost type is wider than the intermediate, or
5215 - the initial type is a pointer type and the precisions of the
5216 intermediate and final types differ, or
5217 - the final type is a pointer type and the precisions of the
5218 initial and intermediate types differ. */
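	      /* For instance, (short) (long) i with int i collapses to
		 (short) i on the usual 32/64-bit widths: no conversion is
		 floating, the intermediate type is not narrower than both
		 ends, and no change of signedness is involved.  */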
5219 if (! inside_float && ! inter_float && ! final_float
5220 && (inter_prec > inside_prec || inter_prec > final_prec)
5221 && ! (inside_int && inter_int
5222 && inter_unsignedp != inside_unsignedp
5223 && inter_prec < final_prec)
5224 && ((inter_unsignedp && inter_prec > inside_prec)
5225 == (final_unsignedp && final_prec > inter_prec))
5226 && ! (inside_ptr && inter_prec != final_prec)
5227 && ! (final_ptr && inside_prec != inter_prec)
5228 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5229 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5230 && ! final_ptr)
5231 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5232 }
5233
5234 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
5235 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
5236 /* Detect assigning a bitfield. */
5237 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
5238 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
5239 {
5240 /* Don't leave an assignment inside a conversion
5241 unless assigning a bitfield. */
5242 tree prev = TREE_OPERAND (t, 0);
5243 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
5244 /* First do the assignment, then return converted constant. */
5245 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
5246 TREE_USED (t) = 1;
5247 return t;
5248 }
5249
5250 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
5251	         constant (if x has signed type, the sign bit cannot be set
5252 in c). This folds extension into the BIT_AND_EXPR. */
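	  /* For example, (int) (x & 0x7f) with x a signed char becomes
	     (int) x & 0x7f: the constant leaves the sign bit clear, so
	     masking before or after the widening gives the same value.  */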
5253 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
5254 && TREE_CODE (TREE_TYPE (t)) != BOOLEAN_TYPE
5255 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
5256 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
5257 {
5258 tree and = TREE_OPERAND (t, 0);
5259 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
5260 int change = 0;
5261
5262 if (TREE_UNSIGNED (TREE_TYPE (and))
5263 || (TYPE_PRECISION (TREE_TYPE (t))
5264 <= TYPE_PRECISION (TREE_TYPE (and))))
5265 change = 1;
5266 else if (TYPE_PRECISION (TREE_TYPE (and1))
5267 <= HOST_BITS_PER_WIDE_INT
5268 && host_integerp (and1, 1))
5269 {
5270 unsigned HOST_WIDE_INT cst;
5271
5272 cst = tree_low_cst (and1, 1);
5273 cst &= (HOST_WIDE_INT) -1
5274 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
5275 change = (cst == 0);
5276 #ifdef LOAD_EXTEND_OP
5277 if (change
5278 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
5279 == ZERO_EXTEND))
5280 {
5281 tree uns = (*lang_hooks.types.unsigned_type) (TREE_TYPE (and0));
5282 and0 = convert (uns, and0);
5283 and1 = convert (uns, and1);
5284 }
5285 #endif
5286 }
5287 if (change)
5288 return fold (build (BIT_AND_EXPR, TREE_TYPE (t),
5289 convert (TREE_TYPE (t), and0),
5290 convert (TREE_TYPE (t), and1)));
5291 }
5292
5293 if (!wins)
5294 {
5295 TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
5296 return t;
5297 }
5298 return fold_convert (t, arg0);
5299
5300 case VIEW_CONVERT_EXPR:
5301 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
5302 return build1 (VIEW_CONVERT_EXPR, type,
5303 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5304 return t;
5305
5306 case COMPONENT_REF:
5307 if (TREE_CODE (arg0) == CONSTRUCTOR
5308 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
5309 {
5310 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
5311 if (m)
5312 t = TREE_VALUE (m);
5313 }
5314 return t;
5315
5316 case RANGE_EXPR:
5317 TREE_CONSTANT (t) = wins;
5318 return t;
5319
5320 case NEGATE_EXPR:
5321 if (wins)
5322 {
5323 if (TREE_CODE (arg0) == INTEGER_CST)
5324 {
5325 unsigned HOST_WIDE_INT low;
5326 HOST_WIDE_INT high;
5327 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5328 TREE_INT_CST_HIGH (arg0),
5329 &low, &high);
5330 t = build_int_2 (low, high);
5331 TREE_TYPE (t) = type;
5332 TREE_OVERFLOW (t)
5333 = (TREE_OVERFLOW (arg0)
5334 | force_fit_type (t, overflow && !TREE_UNSIGNED (type)));
5335 TREE_CONSTANT_OVERFLOW (t)
5336 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5337 }
5338 else if (TREE_CODE (arg0) == REAL_CST)
5339 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5340 }
5341 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5342 return TREE_OPERAND (arg0, 0);
5343 /* Convert -((double)float) into (double)(-float). */
5344 else if (TREE_CODE (arg0) == NOP_EXPR
5345 && TREE_CODE (type) == REAL_TYPE)
5346 {
5347 tree targ0 = strip_float_extensions (arg0);
5348 if (targ0 != arg0)
5349 return convert (type, build1 (NEGATE_EXPR, TREE_TYPE (targ0), targ0));
5351 }
5352
5353 /* Convert - (a - b) to (b - a) for non-floating-point. */
5354 else if (TREE_CODE (arg0) == MINUS_EXPR
5355 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
5356 return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
5357 TREE_OPERAND (arg0, 0));
5358
5359 /* Convert -f(x) into f(-x) where f is sin, tan or atan. */
5360 switch (builtin_mathfn_code (arg0))
5361 {
5362 case BUILT_IN_SIN:
5363 case BUILT_IN_SINF:
5364 case BUILT_IN_SINL:
5365 case BUILT_IN_TAN:
5366 case BUILT_IN_TANF:
5367 case BUILT_IN_TANL:
5368 case BUILT_IN_ATAN:
5369 case BUILT_IN_ATANF:
5370 case BUILT_IN_ATANL:
5371 if (negate_expr_p (TREE_VALUE (TREE_OPERAND (arg0, 1))))
5372 {
5373 tree fndecl, arg, arglist;
5374
5375 fndecl = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
5376 arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5377 arg = fold (build1 (NEGATE_EXPR, type, arg));
5378 arglist = build_tree_list (NULL_TREE, arg);
5379 return build_function_call_expr (fndecl, arglist);
5380 }
5381 break;
5382
5383 default:
5384 break;
5385 }
5386 return t;
5387
5388 case ABS_EXPR:
5389 if (wins)
5390 {
5391 if (TREE_CODE (arg0) == INTEGER_CST)
5392 {
5393 /* If the value is unsigned, then the absolute value is
5394 the same as the ordinary value. */
5395 if (TREE_UNSIGNED (type))
5396 return arg0;
5397 /* Similarly, if the value is non-negative. */
5398 else if (INT_CST_LT (integer_minus_one_node, arg0))
5399 return arg0;
5400 /* If the value is negative, then the absolute value is
5401 its negation. */
5402 else
5403 {
5404 unsigned HOST_WIDE_INT low;
5405 HOST_WIDE_INT high;
5406 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5407 TREE_INT_CST_HIGH (arg0),
5408 &low, &high);
5409 t = build_int_2 (low, high);
5410 TREE_TYPE (t) = type;
5411 TREE_OVERFLOW (t)
5412 = (TREE_OVERFLOW (arg0)
5413 | force_fit_type (t, overflow));
5414 TREE_CONSTANT_OVERFLOW (t)
5415 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5416 }
5417 }
5418 else if (TREE_CODE (arg0) == REAL_CST)
5419 {
5420 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
5421 t = build_real (type,
5422 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5423 }
5424 }
5425 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5426 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
5427 /* Convert fabs((double)float) into (double)fabsf(float). */
5428 else if (TREE_CODE (arg0) == NOP_EXPR
5429 && TREE_CODE (type) == REAL_TYPE)
5430 {
5431 tree targ0 = strip_float_extensions (arg0);
5432 if (targ0 != arg0)
5433 return convert (type, fold (build1 (ABS_EXPR, TREE_TYPE (targ0),
5434 targ0)));
5435 }
5436 else if (tree_expr_nonnegative_p (arg0))
5437 return arg0;
5438 return t;
5439
5440 case CONJ_EXPR:
5441 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5442 return convert (type, arg0);
5443 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5444 return build (COMPLEX_EXPR, type,
5445 TREE_OPERAND (arg0, 0),
5446 negate_expr (TREE_OPERAND (arg0, 1)));
5447 else if (TREE_CODE (arg0) == COMPLEX_CST)
5448 return build_complex (type, TREE_REALPART (arg0),
5449 negate_expr (TREE_IMAGPART (arg0)));
5450 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5451 return fold (build (TREE_CODE (arg0), type,
5452 fold (build1 (CONJ_EXPR, type,
5453 TREE_OPERAND (arg0, 0))),
5454 fold (build1 (CONJ_EXPR,
5455 type, TREE_OPERAND (arg0, 1)))));
5456 else if (TREE_CODE (arg0) == CONJ_EXPR)
5457 return TREE_OPERAND (arg0, 0);
5458 return t;
5459
5460 case BIT_NOT_EXPR:
5461 if (wins)
5462 {
5463 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
5464 ~ TREE_INT_CST_HIGH (arg0));
5465 TREE_TYPE (t) = type;
5466 force_fit_type (t, 0);
5467 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
5468 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
5469 }
5470 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
5471 return TREE_OPERAND (arg0, 0);
5472 return t;
5473
5474 case PLUS_EXPR:
5475 /* A + (-B) -> A - B */
5476 if (TREE_CODE (arg1) == NEGATE_EXPR)
5477 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5478 /* (-A) + B -> B - A */
5479 if (TREE_CODE (arg0) == NEGATE_EXPR)
5480 return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
5481 else if (! FLOAT_TYPE_P (type))
5482 {
5483 if (integer_zerop (arg1))
5484 return non_lvalue (convert (type, arg0));
5485
5486 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
5487 with a constant, and the two constants have no bits in common,
5488 we should treat this as a BIT_IOR_EXPR since this may produce more
5489 simplifications. */
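	      /* For example, (x & 0xf0) + (y & 0x0f): the masked values
		 share no set bits, so no carries can occur and the sum
		 equals (x & 0xf0) | (y & 0x0f).  */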
5490 if (TREE_CODE (arg0) == BIT_AND_EXPR
5491 && TREE_CODE (arg1) == BIT_AND_EXPR
5492 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5493 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5494 && integer_zerop (const_binop (BIT_AND_EXPR,
5495 TREE_OPERAND (arg0, 1),
5496 TREE_OPERAND (arg1, 1), 0)))
5497 {
5498 code = BIT_IOR_EXPR;
5499 goto bit_ior;
5500 }
5501
5502 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
5503 (plus (plus (mult) (mult)) (foo)) so that we can
5504 take advantage of the factoring cases below. */
5505 if ((TREE_CODE (arg0) == PLUS_EXPR
5506 && TREE_CODE (arg1) == MULT_EXPR)
5507 || (TREE_CODE (arg1) == PLUS_EXPR
5508 && TREE_CODE (arg0) == MULT_EXPR))
5509 {
5510 tree parg0, parg1, parg, marg;
5511
5512 if (TREE_CODE (arg0) == PLUS_EXPR)
5513 parg = arg0, marg = arg1;
5514 else
5515 parg = arg1, marg = arg0;
5516 parg0 = TREE_OPERAND (parg, 0);
5517 parg1 = TREE_OPERAND (parg, 1);
5518 STRIP_NOPS (parg0);
5519 STRIP_NOPS (parg1);
5520
5521 if (TREE_CODE (parg0) == MULT_EXPR
5522 && TREE_CODE (parg1) != MULT_EXPR)
5523 return fold (build (PLUS_EXPR, type,
5524 fold (build (PLUS_EXPR, type,
5525 convert (type, parg0),
5526 convert (type, marg))),
5527 convert (type, parg1)));
5528 if (TREE_CODE (parg0) != MULT_EXPR
5529 && TREE_CODE (parg1) == MULT_EXPR)
5530 return fold (build (PLUS_EXPR, type,
5531 fold (build (PLUS_EXPR, type,
5532 convert (type, parg1),
5533 convert (type, marg))),
5534 convert (type, parg0)));
5535 }
5536
5537 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
5538 {
5539 tree arg00, arg01, arg10, arg11;
5540 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
5541
5542 /* (A * C) + (B * C) -> (A+B) * C.
5543 We are most concerned about the case where C is a constant,
5544 but other combinations show up during loop reduction. Since
5545 it is not difficult, try all four possibilities. */
5546
5547 arg00 = TREE_OPERAND (arg0, 0);
5548 arg01 = TREE_OPERAND (arg0, 1);
5549 arg10 = TREE_OPERAND (arg1, 0);
5550 arg11 = TREE_OPERAND (arg1, 1);
5551 same = NULL_TREE;
5552
5553 if (operand_equal_p (arg01, arg11, 0))
5554 same = arg01, alt0 = arg00, alt1 = arg10;
5555 else if (operand_equal_p (arg00, arg10, 0))
5556 same = arg00, alt0 = arg01, alt1 = arg11;
5557 else if (operand_equal_p (arg00, arg11, 0))
5558 same = arg00, alt0 = arg01, alt1 = arg10;
5559 else if (operand_equal_p (arg01, arg10, 0))
5560 same = arg01, alt0 = arg00, alt1 = arg11;
5561
5562 /* No identical multiplicands; see if we can find a common
5563 power-of-two factor in non-power-of-two multiplies. This
5564 can help in multi-dimensional array access. */
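	      /* For example, i * 12 + j * 4 becomes (i * 3 + j) * 4,
		 exposing the common power-of-two factor 4.  */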
5565 else if (TREE_CODE (arg01) == INTEGER_CST
5566 && TREE_CODE (arg11) == INTEGER_CST
5567 && TREE_INT_CST_HIGH (arg01) == 0
5568 && TREE_INT_CST_HIGH (arg11) == 0)
5569 {
5570 HOST_WIDE_INT int01, int11, tmp;
5571 int01 = TREE_INT_CST_LOW (arg01);
5572 int11 = TREE_INT_CST_LOW (arg11);
5573
5574 /* Move min of absolute values to int11. */
5575 if ((int01 >= 0 ? int01 : -int01)
5576 < (int11 >= 0 ? int11 : -int11))
5577 {
5578 tmp = int01, int01 = int11, int11 = tmp;
5579 alt0 = arg00, arg00 = arg10, arg10 = alt0;
5580 alt0 = arg01, arg01 = arg11, arg11 = alt0;
5581 }
5582
5583 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
5584 {
5585 alt0 = fold (build (MULT_EXPR, type, arg00,
5586 build_int_2 (int01 / int11, 0)));
5587 alt1 = arg10;
5588 same = arg11;
5589 }
5590 }
5591
5592 if (same)
5593 return fold (build (MULT_EXPR, type,
5594 fold (build (PLUS_EXPR, type, alt0, alt1)),
5595 same));
5596 }
5597 }
5598
5599 /* See if ARG1 is zero and X + ARG1 reduces to X. */
5600 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
5601 return non_lvalue (convert (type, arg0));
5602
5603 /* Likewise if the operands are reversed. */
5604 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
5605 return non_lvalue (convert (type, arg1));
5606
5607 bit_rotate:
5608	      /* (A << C1) + (A >> C2), if A is unsigned and C1+C2 is the size of A,
5609	         is a rotate of A by C1 bits.  */
5610	      /* (A << B) + (A >> (Z - B)), if A is unsigned and Z is the size of A,
5611	         is a rotate of A by B bits.  */
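      /* For example, with a 32-bit unsigned A, (A << 3) + (A >> 29) is
	 A rotated left by 3 bits.  */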
5612 {
5613 enum tree_code code0, code1;
5614 code0 = TREE_CODE (arg0);
5615 code1 = TREE_CODE (arg1);
5616 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
5617 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
5618 && operand_equal_p (TREE_OPERAND (arg0, 0),
5619 TREE_OPERAND (arg1, 0), 0)
5620 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5621 {
5622 tree tree01, tree11;
5623 enum tree_code code01, code11;
5624
5625 tree01 = TREE_OPERAND (arg0, 1);
5626 tree11 = TREE_OPERAND (arg1, 1);
5627 STRIP_NOPS (tree01);
5628 STRIP_NOPS (tree11);
5629 code01 = TREE_CODE (tree01);
5630 code11 = TREE_CODE (tree11);
5631 if (code01 == INTEGER_CST
5632 && code11 == INTEGER_CST
5633 && TREE_INT_CST_HIGH (tree01) == 0
5634 && TREE_INT_CST_HIGH (tree11) == 0
5635 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
5636 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
5637 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
5638 code0 == LSHIFT_EXPR ? tree01 : tree11);
5639 else if (code11 == MINUS_EXPR)
5640 {
5641 tree tree110, tree111;
5642 tree110 = TREE_OPERAND (tree11, 0);
5643 tree111 = TREE_OPERAND (tree11, 1);
5644 STRIP_NOPS (tree110);
5645 STRIP_NOPS (tree111);
5646 if (TREE_CODE (tree110) == INTEGER_CST
5647 && 0 == compare_tree_int (tree110,
5648 TYPE_PRECISION
5649 (TREE_TYPE (TREE_OPERAND
5650 (arg0, 0))))
5651 && operand_equal_p (tree01, tree111, 0))
5652 return build ((code0 == LSHIFT_EXPR
5653 ? LROTATE_EXPR
5654 : RROTATE_EXPR),
5655 type, TREE_OPERAND (arg0, 0), tree01);
5656 }
5657 else if (code01 == MINUS_EXPR)
5658 {
5659 tree tree010, tree011;
5660 tree010 = TREE_OPERAND (tree01, 0);
5661 tree011 = TREE_OPERAND (tree01, 1);
5662 STRIP_NOPS (tree010);
5663 STRIP_NOPS (tree011);
5664 if (TREE_CODE (tree010) == INTEGER_CST
5665 && 0 == compare_tree_int (tree010,
5666 TYPE_PRECISION
5667 (TREE_TYPE (TREE_OPERAND
5668 (arg0, 0))))
5669 && operand_equal_p (tree11, tree011, 0))
5670 return build ((code0 != LSHIFT_EXPR
5671 ? LROTATE_EXPR
5672 : RROTATE_EXPR),
5673 type, TREE_OPERAND (arg0, 0), tree11);
5674 }
5675 }
5676 }
5677
5678 associate:
5679	      /* In most languages, we can't associate operations on floats through
5680 parentheses. Rather than remember where the parentheses were, we
5681 don't associate floats at all. It shouldn't matter much. However,
5682 associating multiplications is only very slightly inaccurate, so do
5683 that if -funsafe-math-optimizations is specified. */
5684
5685 if (! wins
5686 && (! FLOAT_TYPE_P (type)
5687 || (flag_unsafe_math_optimizations && code == MULT_EXPR)))
5688 {
5689 tree var0, con0, lit0, minus_lit0;
5690 tree var1, con1, lit1, minus_lit1;
5691
5692 /* Split both trees into variables, constants, and literals. Then
5693 associate each group together, the constants with literals,
5694 then the result with variables. This increases the chances of
5695 literals being recombined later and of generating relocatable
5696 expressions for the sum of a constant and literal. */
5697 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
5698 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
5699 code == MINUS_EXPR);
5700
5701 /* Only do something if we found more than two objects. Otherwise,
5702 nothing has changed and we risk infinite recursion. */
5703 if (2 < ((var0 != 0) + (var1 != 0)
5704 + (con0 != 0) + (con1 != 0)
5705 + (lit0 != 0) + (lit1 != 0)
5706 + (minus_lit0 != 0) + (minus_lit1 != 0)))
5707 {
5708 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
5709 if (code == MINUS_EXPR)
5710 code = PLUS_EXPR;
5711
5712 var0 = associate_trees (var0, var1, code, type);
5713 con0 = associate_trees (con0, con1, code, type);
5714 lit0 = associate_trees (lit0, lit1, code, type);
5715 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
5716
5717 /* Preserve the MINUS_EXPR if the negative part of the literal is
5718 greater than the positive part. Otherwise, the multiplicative
5719	             folding code (i.e. extract_muldiv) may be fooled when
5720	             unsigned constants are subtracted, as in the following
5721 example: ((X*2 + 4) - 8U)/2. */
5722 if (minus_lit0 && lit0)
5723 {
5724 if (tree_int_cst_lt (lit0, minus_lit0))
5725 {
5726 minus_lit0 = associate_trees (minus_lit0, lit0,
5727 MINUS_EXPR, type);
5728 lit0 = 0;
5729 }
5730 else
5731 {
5732 lit0 = associate_trees (lit0, minus_lit0,
5733 MINUS_EXPR, type);
5734 minus_lit0 = 0;
5735 }
5736 }
5737 if (minus_lit0)
5738 {
5739 if (con0 == 0)
5740 return convert (type, associate_trees (var0, minus_lit0,
5741 MINUS_EXPR, type));
5742 else
5743 {
5744 con0 = associate_trees (con0, minus_lit0,
5745 MINUS_EXPR, type);
5746 return convert (type, associate_trees (var0, con0,
5747 PLUS_EXPR, type));
5748 }
5749 }
5750
5751 con0 = associate_trees (con0, lit0, code, type);
5752 return convert (type, associate_trees (var0, con0, code, type));
5753 }
5754 }
5755
5756 binary:
5757 if (wins)
5758 t1 = const_binop (code, arg0, arg1, 0);
5759 if (t1 != NULL_TREE)
5760 {
5761 /* The return value should always have
5762 the same type as the original expression. */
5763 if (TREE_TYPE (t1) != TREE_TYPE (t))
5764 t1 = convert (TREE_TYPE (t), t1);
5765
5766 return t1;
5767 }
5768 return t;
5769
5770 case MINUS_EXPR:
5771 /* A - (-B) -> A + B */
5772 if (TREE_CODE (arg1) == NEGATE_EXPR)
5773 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5774 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
5775 if (TREE_CODE (arg0) == NEGATE_EXPR
5776 && (FLOAT_TYPE_P (type)
5777 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
5778 && negate_expr_p (arg1)
5779 && (! TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
5780 && (! TREE_SIDE_EFFECTS (arg1) || TREE_CONSTANT (arg0)))
5781 return fold (build (MINUS_EXPR, type, negate_expr (arg1),
5782 TREE_OPERAND (arg0, 0)));
5783
5784 if (! FLOAT_TYPE_P (type))
5785 {
5786 if (! wins && integer_zerop (arg0))
5787 return negate_expr (convert (type, arg1));
5788 if (integer_zerop (arg1))
5789 return non_lvalue (convert (type, arg0));
5790
5791 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
5792 about the case where C is a constant, just try one of the
5793 four possibilities. */
5794
5795 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
5796 && operand_equal_p (TREE_OPERAND (arg0, 1),
5797 TREE_OPERAND (arg1, 1), 0))
5798 return fold (build (MULT_EXPR, type,
5799 fold (build (MINUS_EXPR, type,
5800 TREE_OPERAND (arg0, 0),
5801 TREE_OPERAND (arg1, 0))),
5802 TREE_OPERAND (arg0, 1)));
5803
5804 /* Fold A - (A & B) into ~B & A. */
5805 if (!TREE_SIDE_EFFECTS (arg0)
5806 && TREE_CODE (arg1) == BIT_AND_EXPR)
5807 {
5808 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
5809 return fold (build (BIT_AND_EXPR, type,
5810 fold (build1 (BIT_NOT_EXPR, type,
5811 TREE_OPERAND (arg1, 0))),
5812 arg0));
5813 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
5814 return fold (build (BIT_AND_EXPR, type,
5815 fold (build1 (BIT_NOT_EXPR, type,
5816 TREE_OPERAND (arg1, 1))),
5817 arg0));
5818 }
5819 }
5820
5821 /* See if ARG1 is zero and X - ARG1 reduces to X. */
5822 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
5823 return non_lvalue (convert (type, arg0));
5824
5825 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
5826 ARG0 is zero and X + ARG0 reduces to X, since that would mean
5827 (-ARG1 + ARG0) reduces to -ARG1. */
5828 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
5829 return negate_expr (convert (type, arg1));
5830
5831 /* Fold &x - &x. This can happen from &x.foo - &x.
5832 This is unsafe for certain floats even in non-IEEE formats.
5833	         In IEEE, it is unsafe because it gives the wrong result for NaNs.
5834 Also note that operand_equal_p is always false if an operand
5835 is volatile. */
5836
5837 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
5838 && operand_equal_p (arg0, arg1, 0))
5839 return convert (type, integer_zero_node);
5840
5841 goto associate;
5842
5843 case MULT_EXPR:
5844 /* (-A) * (-B) -> A * B */
5845 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
5846 return fold (build (MULT_EXPR, type, TREE_OPERAND (arg0, 0),
5847 TREE_OPERAND (arg1, 0)));
5848
5849 if (! FLOAT_TYPE_P (type))
5850 {
5851 if (integer_zerop (arg1))
5852 return omit_one_operand (type, arg1, arg0);
5853 if (integer_onep (arg1))
5854 return non_lvalue (convert (type, arg0));
5855
5856 /* (a * (1 << b)) is (a << b) */
5857 if (TREE_CODE (arg1) == LSHIFT_EXPR
5858 && integer_onep (TREE_OPERAND (arg1, 0)))
5859 return fold (build (LSHIFT_EXPR, type, arg0,
5860 TREE_OPERAND (arg1, 1)));
5861 if (TREE_CODE (arg0) == LSHIFT_EXPR
5862 && integer_onep (TREE_OPERAND (arg0, 0)))
5863 return fold (build (LSHIFT_EXPR, type, arg1,
5864 TREE_OPERAND (arg0, 1)));
5865
5866 if (TREE_CODE (arg1) == INTEGER_CST
5867 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
5868 convert (type, arg1),
5869 code, NULL_TREE)))
5870 return convert (type, tem);
5871
5872 }
5873 else
5874 {
5875 /* Maybe fold x * 0 to 0. The expressions aren't the same
5876 when x is NaN, since x * 0 is also NaN. Nor are they the
5877 same in modes with signed zeros, since multiplying a
5878 negative value by 0 gives -0, not +0. */
5879 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
5880 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
5881 && real_zerop (arg1))
5882 return omit_one_operand (type, arg1, arg0);
5883 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
5884 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
5885 && real_onep (arg1))
5886 return non_lvalue (convert (type, arg0));
5887
5888 /* Transform x * -1.0 into -x. */
5889 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
5890 && real_minus_onep (arg1))
5891 return fold (build1 (NEGATE_EXPR, type, arg0));
5892
5893 /* x*2 is x+x */
5894 if (! wins && real_twop (arg1)
5895 && (*lang_hooks.decls.global_bindings_p) () == 0
5896 && ! CONTAINS_PLACEHOLDER_P (arg0))
5897 {
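	      /* save_expr guarantees that ARG0 is evaluated only once,
		 even though it appears twice in the replacement.  */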
5898 tree arg = save_expr (arg0);
5899 return fold (build (PLUS_EXPR, type, arg, arg));
5900 }
5901
5902 if (flag_unsafe_math_optimizations)
5903 {
5904 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
5905 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
5906
5907 /* Optimizations of sqrt(...)*sqrt(...). */
5908 if ((fcode0 == BUILT_IN_SQRT && fcode1 == BUILT_IN_SQRT)
5909 || (fcode0 == BUILT_IN_SQRTF && fcode1 == BUILT_IN_SQRTF)
5910 || (fcode0 == BUILT_IN_SQRTL && fcode1 == BUILT_IN_SQRTL))
5911 {
5912 tree sqrtfn, arg, arglist;
5913 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
5914 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
5915
5916 /* Optimize sqrt(x)*sqrt(x) as x. */
5917 if (operand_equal_p (arg00, arg10, 0)
5918 && ! HONOR_SNANS (TYPE_MODE (type)))
5919 return arg00;
5920
5921 /* Optimize sqrt(x)*sqrt(y) as sqrt(x*y). */
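	      /* This requires -funsafe-math-optimizations: for
		 x == y == -1.0, sqrt(x)*sqrt(y) is NaN*NaN == NaN,
		 while sqrt(x*y) is sqrt(1.0) == 1.0.  */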
5922 sqrtfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
5923 arg = fold (build (MULT_EXPR, type, arg00, arg10));
5924 arglist = build_tree_list (NULL_TREE, arg);
5925 return build_function_call_expr (sqrtfn, arglist);
5926 }
5927
5928 /* Optimize exp(x)*exp(y) as exp(x+y). */
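	      /* This is the identity e**x * e**y == e**(x+y); it is done
		 only under -funsafe-math-optimizations because the two
		 forms may round differently.  */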
5929 if ((fcode0 == BUILT_IN_EXP && fcode1 == BUILT_IN_EXP)
5930 || (fcode0 == BUILT_IN_EXPF && fcode1 == BUILT_IN_EXPF)
5931 || (fcode0 == BUILT_IN_EXPL && fcode1 == BUILT_IN_EXPL))
5932 {
5933 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
5934 tree arg = build (PLUS_EXPR, type,
5935 TREE_VALUE (TREE_OPERAND (arg0, 1)),
5936 TREE_VALUE (TREE_OPERAND (arg1, 1)));
5937 tree arglist = build_tree_list (NULL_TREE, fold (arg));
5938 return build_function_call_expr (expfn, arglist);
5939 }
5940
5941 /* Optimizations of pow(...)*pow(...). */
5942 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
5943 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
5944 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
5945 {
5946 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
5947 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
5948 1)));
5949 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
5950 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
5951 1)));
5952
5953 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
5954 if (operand_equal_p (arg01, arg11, 0))
5955 {
5956 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
5957 tree arg = build (MULT_EXPR, type, arg00, arg10);
5958 tree arglist = tree_cons (NULL_TREE, fold (arg),
5959 build_tree_list (NULL_TREE,
5960 arg01));
5961 return build_function_call_expr (powfn, arglist);
5962 }
5963
5964 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
5965 if (operand_equal_p (arg00, arg10, 0))
5966 {
5967 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
5968 tree arg = fold (build (PLUS_EXPR, type, arg01, arg11));
5969 tree arglist = tree_cons (NULL_TREE, arg00,
5970 build_tree_list (NULL_TREE,
5971 arg));
5972 return build_function_call_expr (powfn, arglist);
5973 }
5974 }
5975
5976 /* Optimize tan(x)*cos(x) as sin(x). */
5977 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
5978 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
5979 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
5980 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
5981 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
5982 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
5983 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
5984 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
5985 {
5986 tree sinfn;
5987
5988 switch (fcode0)
5989 {
5990 case BUILT_IN_TAN:
5991 case BUILT_IN_COS:
5992 sinfn = implicit_built_in_decls[BUILT_IN_SIN];
5993 break;
5994 case BUILT_IN_TANF:
5995 case BUILT_IN_COSF:
5996 sinfn = implicit_built_in_decls[BUILT_IN_SINF];
5997 break;
5998 case BUILT_IN_TANL:
5999 case BUILT_IN_COSL:
6000 sinfn = implicit_built_in_decls[BUILT_IN_SINL];
6001 break;
6002 default:
6003 sinfn = NULL_TREE;
6004 }
6005
6006 if (sinfn != NULL_TREE)
6007 return build_function_call_expr (sinfn,
6008 TREE_OPERAND (arg0, 1));
6009 }
6010 }
6011 }
6012 goto associate;
6013
6014 case BIT_IOR_EXPR:
6015 bit_ior:
6016 if (integer_all_onesp (arg1))
6017 return omit_one_operand (type, arg1, arg0);
6018 if (integer_zerop (arg1))
6019 return non_lvalue (convert (type, arg0));
6020 t1 = distribute_bit_expr (code, type, arg0, arg1);
6021 if (t1 != NULL_TREE)
6022 return t1;
6023
6024 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
6025
6026 This results in more efficient code for machines without a NAND
6027 instruction. Combine will canonicalize to the first form
6028 which will allow use of NAND instructions provided by the
6029 backend if they exist. */
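      /* This is De Morgan's law: ~a | ~b == ~(a & b).  */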
6030 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6031 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6032 {
6033 return fold (build1 (BIT_NOT_EXPR, type,
6034 build (BIT_AND_EXPR, type,
6035 TREE_OPERAND (arg0, 0),
6036 TREE_OPERAND (arg1, 0))));
6037 }
6038
6039 /* See if this can be simplified into a rotate first. If that
6040 is unsuccessful continue in the association code. */
6041 goto bit_rotate;
6042
6043 case BIT_XOR_EXPR:
6044 if (integer_zerop (arg1))
6045 return non_lvalue (convert (type, arg0));
6046 if (integer_all_onesp (arg1))
6047 return fold (build1 (BIT_NOT_EXPR, type, arg0));
6048
6049 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
6050 with a constant, and the two constants have no bits in common,
6051 we should treat this as a BIT_IOR_EXPR since this may produce more
6052 simplifications. */
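      /* With disjoint masks the operands never share a set bit, so
	 XOR, IOR and PLUS all compute the same value.  */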
6053 if (TREE_CODE (arg0) == BIT_AND_EXPR
6054 && TREE_CODE (arg1) == BIT_AND_EXPR
6055 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6056 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6057 && integer_zerop (const_binop (BIT_AND_EXPR,
6058 TREE_OPERAND (arg0, 1),
6059 TREE_OPERAND (arg1, 1), 0)))
6060 {
6061 code = BIT_IOR_EXPR;
6062 goto bit_ior;
6063 }
6064
6065 /* See if this can be simplified into a rotate first. If that
6066 is unsuccessful continue in the association code. */
6067 goto bit_rotate;
6068
6069 case BIT_AND_EXPR:
6070 bit_and:
6071 if (integer_all_onesp (arg1))
6072 return non_lvalue (convert (type, arg0));
6073 if (integer_zerop (arg1))
6074 return omit_one_operand (type, arg1, arg0);
6075 t1 = distribute_bit_expr (code, type, arg0, arg1);
6076 if (t1 != NULL_TREE)
6077 return t1;
6078 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
6079 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
6080 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6081 {
6082 unsigned int prec
6083 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
6084
6085 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
6086 && (~TREE_INT_CST_LOW (arg1)
6087 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
6088 return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
6089 }
6090
6091 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
6092
6093 This results in more efficient code for machines without a NOR
6094 instruction. Combine will canonicalize to the first form
6095 which will allow use of NOR instructions provided by the
6096 backend if they exist. */
6097 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6098 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6099 {
6100 return fold (build1 (BIT_NOT_EXPR, type,
6101 build (BIT_IOR_EXPR, type,
6102 TREE_OPERAND (arg0, 0),
6103 TREE_OPERAND (arg1, 0))));
6104 }
6105
6106 goto associate;
6107
6108 case BIT_ANDTC_EXPR:
6109 if (integer_all_onesp (arg0))
6110 return non_lvalue (convert (type, arg1));
6111 if (integer_zerop (arg0))
6112 return omit_one_operand (type, arg0, arg1);
6113 if (TREE_CODE (arg1) == INTEGER_CST)
6114 {
6115 arg1 = fold (build1 (BIT_NOT_EXPR, type, arg1));
6116 code = BIT_AND_EXPR;
6117 goto bit_and;
6118 }
6119 goto binary;
6120
6121 case RDIV_EXPR:
6122 /* Don't touch a floating-point divide by zero unless the mode
6123 of the constant can represent infinity. */
6124 if (TREE_CODE (arg1) == REAL_CST
6125 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
6126 && real_zerop (arg1))
6127 return t;
6128
6129 /* (-A) / (-B) -> A / B */
6130 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
6131 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
6132 TREE_OPERAND (arg1, 0)));
6133
6134 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
6135 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6136 && real_onep (arg1))
6137 return non_lvalue (convert (type, arg0));
6138
6139 /* If ARG1 is a constant, we can convert this to a multiply by the
6140 reciprocal. This does not have the same rounding properties,
6141 so only do this if -funsafe-math-optimizations. We can actually
6142 always safely do it if ARG1 is a power of two, but it's hard to
6143 tell if it is or not in a portable manner. */
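      /* For example, x / 2.0 can always be rewritten as x * 0.5, since
	 0.5 is exact; x / 3.0 rewritten as x * (1.0/3.0) may round
	 differently.  */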
6144 if (TREE_CODE (arg1) == REAL_CST)
6145 {
6146 if (flag_unsafe_math_optimizations
6147 && 0 != (tem = const_binop (code, build_real (type, dconst1),
6148 arg1, 0)))
6149 return fold (build (MULT_EXPR, type, arg0, tem));
6150 /* Find the reciprocal if optimizing and the result is exact. */
6151 else if (optimize)
6152 {
6153 REAL_VALUE_TYPE r;
6154 r = TREE_REAL_CST (arg1);
6155	          if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
6156 {
6157 tem = build_real (type, r);
6158 return fold (build (MULT_EXPR, type, arg0, tem));
6159 }
6160 }
6161 }
6162 /* Convert A/B/C to A/(B*C). */
6163 if (flag_unsafe_math_optimizations
6164 && TREE_CODE (arg0) == RDIV_EXPR)
6165 {
6166 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
6167 build (MULT_EXPR, type, TREE_OPERAND (arg0, 1),
6168 arg1)));
6169 }
6170 /* Convert A/(B/C) to (A/B)*C. */
6171 if (flag_unsafe_math_optimizations
6172 && TREE_CODE (arg1) == RDIV_EXPR)
6173 {
6174 return fold (build (MULT_EXPR, type,
6175 build (RDIV_EXPR, type, arg0,
6176 TREE_OPERAND (arg1, 0)),
6177 TREE_OPERAND (arg1, 1)));
6178 }
6179
6180 if (flag_unsafe_math_optimizations)
6181 {
6182 enum built_in_function fcode = builtin_mathfn_code (arg1);
6183 /* Optimize x/exp(y) into x*exp(-y). */
6184 if (fcode == BUILT_IN_EXP
6185 || fcode == BUILT_IN_EXPF
6186 || fcode == BUILT_IN_EXPL)
6187 {
6188 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6189 tree arg = build1 (NEGATE_EXPR, type,
6190 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6191 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6192 arg1 = build_function_call_expr (expfn, arglist);
6193 return fold (build (MULT_EXPR, type, arg0, arg1));
6194 }
6195
6196 /* Optimize x/pow(y,z) into x*pow(y,-z). */
6197 if (fcode == BUILT_IN_POW
6198 || fcode == BUILT_IN_POWF
6199 || fcode == BUILT_IN_POWL)
6200 {
6201 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6202 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6203 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
6204 tree neg11 = fold (build1 (NEGATE_EXPR, type, arg11));
6205	          tree arglist = tree_cons (NULL_TREE, arg10,
6206 build_tree_list (NULL_TREE, neg11));
6207 arg1 = build_function_call_expr (powfn, arglist);
6208 return fold (build (MULT_EXPR, type, arg0, arg1));
6209 }
6210 }
6211
6212 if (flag_unsafe_math_optimizations)
6213 {
6214 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6215 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6216
6217 /* Optimize sin(x)/cos(x) as tan(x). */
6218 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
6219 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
6220 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
6221 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6222 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6223 {
6224 tree tanfn;
6225
6226 if (fcode0 == BUILT_IN_SIN)
6227 tanfn = implicit_built_in_decls[BUILT_IN_TAN];
6228 else if (fcode0 == BUILT_IN_SINF)
6229 tanfn = implicit_built_in_decls[BUILT_IN_TANF];
6230 else if (fcode0 == BUILT_IN_SINL)
6231 tanfn = implicit_built_in_decls[BUILT_IN_TANL];
6232 else
6233 tanfn = NULL_TREE;
6234
6235 if (tanfn != NULL_TREE)
6236 return build_function_call_expr (tanfn,
6237 TREE_OPERAND (arg0, 1));
6238 }
6239
6240 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
6241 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
6242 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
6243 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
6244 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6245 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6246 {
6247 tree tanfn;
6248
6249 if (fcode0 == BUILT_IN_COS)
6250 tanfn = implicit_built_in_decls[BUILT_IN_TAN];
6251 else if (fcode0 == BUILT_IN_COSF)
6252 tanfn = implicit_built_in_decls[BUILT_IN_TANF];
6253 else if (fcode0 == BUILT_IN_COSL)
6254 tanfn = implicit_built_in_decls[BUILT_IN_TANL];
6255 else
6256 tanfn = NULL_TREE;
6257
6258 if (tanfn != NULL_TREE)
6259 {
6260 tree tmp = TREE_OPERAND (arg0, 1);
6261 tmp = build_function_call_expr (tanfn, tmp);
6262 return fold (build (RDIV_EXPR, type,
6263 build_real (type, dconst1),
6264 tmp));
6265 }
6266 }
6267 }
6268 goto binary;
6269
6270 case TRUNC_DIV_EXPR:
6271 case ROUND_DIV_EXPR:
6272 case FLOOR_DIV_EXPR:
6273 case CEIL_DIV_EXPR:
6274 case EXACT_DIV_EXPR:
6275 if (integer_onep (arg1))
6276 return non_lvalue (convert (type, arg0));
6277 if (integer_zerop (arg1))
6278 return t;
6279
6280 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
6281 operation, EXACT_DIV_EXPR.
6282
6283 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
6284	         At one time others generated faster code, but it's not clear if they do
6285	         after the last round of changes to the DIV code in expmed.c.  */
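      /* For example, if ARG0 is known to be N * 4, then a ceiling or
	 floor division by 4 is exactly N, so EXACT_DIV_EXPR applies.  */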
6286 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
6287 && multiple_of_p (type, arg0, arg1))
6288 return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));
6289
6290 if (TREE_CODE (arg1) == INTEGER_CST
6291 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6292 code, NULL_TREE)))
6293 return convert (type, tem);
6294
6295 goto binary;
6296
6297 case CEIL_MOD_EXPR:
6298 case FLOOR_MOD_EXPR:
6299 case ROUND_MOD_EXPR:
6300 case TRUNC_MOD_EXPR:
6301 if (integer_onep (arg1))
6302 return omit_one_operand (type, integer_zero_node, arg0);
6303 if (integer_zerop (arg1))
6304 return t;
6305
6306 if (TREE_CODE (arg1) == INTEGER_CST
6307 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6308 code, NULL_TREE)))
6309 return convert (type, tem);
6310
6311 goto binary;
6312
6313 case LROTATE_EXPR:
6314 case RROTATE_EXPR:
6315 if (integer_all_onesp (arg0))
6316 return omit_one_operand (type, arg0, arg1);
6317 goto shift;
6318
6319 case RSHIFT_EXPR:
6320 /* Optimize -1 >> x for arithmetic right shifts. */
6321 if (integer_all_onesp (arg0) && ! TREE_UNSIGNED (type))
6322 return omit_one_operand (type, arg0, arg1);
6323 /* ... fall through ... */
6324
6325 case LSHIFT_EXPR:
6326 shift:
6327 if (integer_zerop (arg1))
6328 return non_lvalue (convert (type, arg0));
6329 if (integer_zerop (arg0))
6330 return omit_one_operand (type, arg0, arg1);
6331
6332	      /* Since a negative shift count is not well-defined,
6333 don't try to compute it in the compiler. */
6334 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
6335 return t;
6336 /* Rewrite an LROTATE_EXPR by a constant into an
6337 RROTATE_EXPR by a new constant. */
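      /* For example, in a 32-bit type a rotate left by 8 becomes a
	 rotate right by 24.  */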
6338 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
6339 {
6340 TREE_SET_CODE (t, RROTATE_EXPR);
6341 code = RROTATE_EXPR;
6342 TREE_OPERAND (t, 1) = arg1
6343 = const_binop
6344 (MINUS_EXPR,
6345 convert (TREE_TYPE (arg1),
6346 build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0)),
6347 arg1, 0);
6348 if (tree_int_cst_sgn (arg1) < 0)
6349 return t;
6350 }
6351
6352 /* If we have a rotate of a bit operation with the rotate count and
6353 the second operand of the bit operation both constant,
6354 permute the two operations. */
6355 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6356 && (TREE_CODE (arg0) == BIT_AND_EXPR
6357 || TREE_CODE (arg0) == BIT_ANDTC_EXPR
6358 || TREE_CODE (arg0) == BIT_IOR_EXPR
6359 || TREE_CODE (arg0) == BIT_XOR_EXPR)
6360 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
6361 return fold (build (TREE_CODE (arg0), type,
6362 fold (build (code, type,
6363 TREE_OPERAND (arg0, 0), arg1)),
6364 fold (build (code, type,
6365 TREE_OPERAND (arg0, 1), arg1))));
6366
6367 /* Two consecutive rotates adding up to the width of the mode can
6368 be ignored. */
6369 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6370 && TREE_CODE (arg0) == RROTATE_EXPR
6371 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6372 && TREE_INT_CST_HIGH (arg1) == 0
6373 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
6374 && ((TREE_INT_CST_LOW (arg1)
6375 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
6376 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
6377 return TREE_OPERAND (arg0, 0);
6378
6379 goto binary;
6380
6381 case MIN_EXPR:
6382 if (operand_equal_p (arg0, arg1, 0))
6383 return omit_one_operand (type, arg0, arg1);
6384 if (INTEGRAL_TYPE_P (type)
6385 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
6386 return omit_one_operand (type, arg1, arg0);
6387 goto associate;
6388
6389 case MAX_EXPR:
6390 if (operand_equal_p (arg0, arg1, 0))
6391 return omit_one_operand (type, arg0, arg1);
6392 if (INTEGRAL_TYPE_P (type)
6393 && TYPE_MAX_VALUE (type)
6394 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
6395 return omit_one_operand (type, arg1, arg0);
6396 goto associate;
6397
6398 case TRUTH_NOT_EXPR:
6399 /* Note that the operand of this must be an int
6400 and its values must be 0 or 1.
6401 ("true" is a fixed value perhaps depending on the language,
6402 but we don't handle values other than 1 correctly yet.) */
6403 tem = invert_truthvalue (arg0);
6404 /* Avoid infinite recursion. */
6405 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
6406 {
6407 tem = fold_single_bit_test (code, arg0, arg1, type);
6408 if (tem)
6409 return tem;
6410 return t;
6411 }
6412 return convert (type, tem);
6413
6414 case TRUTH_ANDIF_EXPR:
6415 /* Note that the operands of this must be ints
6416 and their values must be 0 or 1.
6417 ("true" is a fixed value perhaps depending on the language.) */
6418 /* If first arg is constant zero, return it. */
6419 if (integer_zerop (arg0))
6420 return convert (type, arg0);
6421 case TRUTH_AND_EXPR:
6422 /* If either arg is constant true, drop it. */
6423 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6424 return non_lvalue (convert (type, arg1));
6425 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
6426 /* Preserve sequence points. */
6427 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
6428 return non_lvalue (convert (type, arg0));
6429 /* If second arg is constant zero, result is zero, but first arg
6430 must be evaluated. */
6431 if (integer_zerop (arg1))
6432 return omit_one_operand (type, arg1, arg0);
6433 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
6434 case will be handled here. */
6435 if (integer_zerop (arg0))
6436 return omit_one_operand (type, arg0, arg1);
6437
6438 truth_andor:
6439 /* We only do these simplifications if we are optimizing. */
6440 if (!optimize)
6441 return t;
6442
6443 /* Check for things like (A || B) && (A || C). We can convert this
6444 to A || (B && C). Note that either operator can be any of the four
6445 truth and/or operations and the transformation will still be
6446 valid. Also note that we only care about order for the
6447 ANDIF and ORIF operators. If B contains side effects, this
6448 might change the truth-value of A. */
6449 if (TREE_CODE (arg0) == TREE_CODE (arg1)
6450 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
6451 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
6452 || TREE_CODE (arg0) == TRUTH_AND_EXPR
6453 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
6454 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
6455 {
6456 tree a00 = TREE_OPERAND (arg0, 0);
6457 tree a01 = TREE_OPERAND (arg0, 1);
6458 tree a10 = TREE_OPERAND (arg1, 0);
6459 tree a11 = TREE_OPERAND (arg1, 1);
6460 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
6461 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
6462 && (code == TRUTH_AND_EXPR
6463 || code == TRUTH_OR_EXPR));
6464
6465 if (operand_equal_p (a00, a10, 0))
6466 return fold (build (TREE_CODE (arg0), type, a00,
6467 fold (build (code, type, a01, a11))));
6468 else if (commutative && operand_equal_p (a00, a11, 0))
6469 return fold (build (TREE_CODE (arg0), type, a00,
6470 fold (build (code, type, a01, a10))));
6471 else if (commutative && operand_equal_p (a01, a10, 0))
6472 return fold (build (TREE_CODE (arg0), type, a01,
6473 fold (build (code, type, a00, a11))));
6474
6475	          /* This case is tricky because we must either have commutative
6476 operators or else A10 must not have side-effects. */
6477
6478 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
6479 && operand_equal_p (a01, a11, 0))
6480 return fold (build (TREE_CODE (arg0), type,
6481 fold (build (code, type, a00, a10)),
6482 a01));
6483 }
6484
6485 /* See if we can build a range comparison. */
6486 if (0 != (tem = fold_range_test (t)))
6487 return tem;
6488
6489 /* Check for the possibility of merging component references. If our
6490 lhs is another similar operation, try to merge its rhs with our
6491 rhs. Then try to merge our lhs and rhs. */
6492 if (TREE_CODE (arg0) == code
6493 && 0 != (tem = fold_truthop (code, type,
6494 TREE_OPERAND (arg0, 1), arg1)))
6495 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6496
6497 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
6498 return tem;
6499
6500 return t;
6501
6502 case TRUTH_ORIF_EXPR:
6503 /* Note that the operands of this must be ints
6504 and their values must be 0 or true.
6505 ("true" is a fixed value perhaps depending on the language.) */
6506 /* If first arg is constant true, return it. */
6507 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6508 return convert (type, arg0);
6509 case TRUTH_OR_EXPR:
6510 /* If either arg is constant zero, drop it. */
6511 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
6512 return non_lvalue (convert (type, arg1));
6513 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
6514 /* Preserve sequence points. */
6515 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
6516 return non_lvalue (convert (type, arg0));
6517 /* If second arg is constant true, result is true, but we must
6518 evaluate first arg. */
6519 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
6520 return omit_one_operand (type, arg1, arg0);
6521 /* Likewise for first arg, but note this only occurs here for
6522 TRUTH_OR_EXPR. */
6523 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6524 return omit_one_operand (type, arg0, arg1);
6525 goto truth_andor;
6526
6527 case TRUTH_XOR_EXPR:
6528 /* If either arg is constant zero, drop it. */
6529 if (integer_zerop (arg0))
6530 return non_lvalue (convert (type, arg1));
6531 if (integer_zerop (arg1))
6532 return non_lvalue (convert (type, arg0));
6533 /* If either arg is constant true, this is a logical inversion. */
6534 if (integer_onep (arg0))
6535 return non_lvalue (convert (type, invert_truthvalue (arg1)));
6536 if (integer_onep (arg1))
6537 return non_lvalue (convert (type, invert_truthvalue (arg0)));
6538 return t;
6539
6540 case EQ_EXPR:
6541 case NE_EXPR:
6542 case LT_EXPR:
6543 case GT_EXPR:
6544 case LE_EXPR:
6545 case GE_EXPR:
6546 /* If one arg is a real or integer constant, put it last. */
6547 if ((TREE_CODE (arg0) == INTEGER_CST
6548 && TREE_CODE (arg1) != INTEGER_CST)
6549 || (TREE_CODE (arg0) == REAL_CST
6550	              && TREE_CODE (arg1) != REAL_CST))
6551 {
6552 TREE_OPERAND (t, 0) = arg1;
6553 TREE_OPERAND (t, 1) = arg0;
6554 arg0 = TREE_OPERAND (t, 0);
6555 arg1 = TREE_OPERAND (t, 1);
6556 code = swap_tree_comparison (code);
6557 TREE_SET_CODE (t, code);
6558 }
6559
6560 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
6561 {
6562 tree targ0 = strip_float_extensions (arg0);
6563 tree targ1 = strip_float_extensions (arg1);
6564 tree newtype = TREE_TYPE (targ0);
6565
6566 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
6567 newtype = TREE_TYPE (targ1);
6568
6569 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
6570 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
6571 return fold (build (code, type, convert (newtype, targ0),
6572 convert (newtype, targ1)));
6573
6574 /* (-a) CMP (-b) -> b CMP a */
6575 if (TREE_CODE (arg0) == NEGATE_EXPR
6576 && TREE_CODE (arg1) == NEGATE_EXPR)
6577 return fold (build (code, type, TREE_OPERAND (arg1, 0),
6578 TREE_OPERAND (arg0, 0)));
6579
6580 if (TREE_CODE (arg1) == REAL_CST)
6581 {
6582 REAL_VALUE_TYPE cst;
6583 cst = TREE_REAL_CST (arg1);
6584
6585 /* (-a) CMP CST -> a swap(CMP) (-CST) */
6586 if (TREE_CODE (arg0) == NEGATE_EXPR)
6587 return
6588 fold (build (swap_tree_comparison (code), type,
6589 TREE_OPERAND (arg0, 0),
6590 build_real (TREE_TYPE (arg1),
6591 REAL_VALUE_NEGATE (cst))));
6592
6593 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
6594 /* a CMP (-0) -> a CMP 0 */
6595 if (REAL_VALUE_MINUS_ZERO (cst))
6596 return fold (build (code, type, arg0,
6597 build_real (TREE_TYPE (arg1), dconst0)));
6598
6599 /* x != NaN is always true, other ops are always false. */
6600 if (REAL_VALUE_ISNAN (cst)
6601 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
6602 {
6603 t = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
6604 return omit_one_operand (type, convert (type, t), arg0);
6605 }
6606
6607 /* Fold comparisons against infinity. */
6608 if (REAL_VALUE_ISINF (cst))
6609 {
6610 tem = fold_inf_compare (code, type, arg0, arg1);
6611 if (tem != NULL_TREE)
6612 return tem;
6613 }
6614 }
6615
6616 /* If this is a comparison of a real constant with a PLUS_EXPR
6617 or a MINUS_EXPR of a real constant, we can convert it into a
6618 comparison with a revised real constant as long as no overflow
6619 occurs when unsafe_math_optimizations are enabled. */
6620 if (flag_unsafe_math_optimizations
6621 && TREE_CODE (arg1) == REAL_CST
6622 && (TREE_CODE (arg0) == PLUS_EXPR
6623 || TREE_CODE (arg0) == MINUS_EXPR)
6624 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6625 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
6626 ? MINUS_EXPR : PLUS_EXPR,
6627 arg1, TREE_OPERAND (arg0, 1), 0))
6628 && ! TREE_CONSTANT_OVERFLOW (tem))
6629 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6630
6631 /* Likewise, we can simplify a comparison of a real constant with
6632 a MINUS_EXPR whose first operand is also a real constant, i.e.
6633 (c1 - x) < c2 becomes x > c1-c2. */
6634 if (flag_unsafe_math_optimizations
6635 && TREE_CODE (arg1) == REAL_CST
6636 && TREE_CODE (arg0) == MINUS_EXPR
6637 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
6638 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
6639 arg1, 0))
6640 && ! TREE_CONSTANT_OVERFLOW (tem))
6641 return fold (build (swap_tree_comparison (code), type,
6642 TREE_OPERAND (arg0, 1), tem));
6643
6644 /* Fold comparisons against built-in math functions. */
6645 if (TREE_CODE (arg1) == REAL_CST
6646 && flag_unsafe_math_optimizations
6647 && ! flag_errno_math)
6648 {
6649 enum built_in_function fcode = builtin_mathfn_code (arg0);
6650
6651 if (fcode != END_BUILTINS)
6652 {
6653 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
6654 if (tem != NULL_TREE)
6655 return tem;
6656 }
6657 }
6658 }
6659
6660 /* Convert foo++ == CONST into ++foo == CONST + INCR.
6661 First, see if one arg is constant; find the constant arg
6662 and the other one. */
6663 {
6664 tree constop = 0, varop = NULL_TREE;
6665 int constopnum = -1;
6666
6667 if (TREE_CONSTANT (arg1))
6668 constopnum = 1, constop = arg1, varop = arg0;
6669 if (TREE_CONSTANT (arg0))
6670 constopnum = 0, constop = arg0, varop = arg1;
6671
6672 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
6673 {
6674 /* This optimization is invalid for ordered comparisons
6675 if CONST+INCR overflows or if foo+incr might overflow.
6676 This optimization is invalid for floating point due to rounding.
6677 For pointer types we assume overflow doesn't happen. */
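	    /* For example, x++ == 5 becomes ++x == 6 for integral x,
	       letting the preincremented value feed the comparison
	       directly.  */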
6678 if (POINTER_TYPE_P (TREE_TYPE (varop))
6679 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
6680 && (code == EQ_EXPR || code == NE_EXPR)))
6681 {
6682 tree newconst
6683 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
6684 constop, TREE_OPERAND (varop, 1)));
6685
6686	            /* Do not overwrite the current varop to be a preincrement;
6687 create a new node so that we won't confuse our caller who
6688 might create trees and throw them away, reusing the
6689 arguments that they passed to build. This shows up in
6690 the THEN or ELSE parts of ?: being postincrements. */
6691 varop = build (PREINCREMENT_EXPR, TREE_TYPE (varop),
6692 TREE_OPERAND (varop, 0),
6693 TREE_OPERAND (varop, 1));
6694
6695 /* If VAROP is a reference to a bitfield, we must mask
6696 the constant by the width of the field. */
6697 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
6698 && DECL_BIT_FIELD (TREE_OPERAND
6699 (TREE_OPERAND (varop, 0), 1)))
6700 {
6701 int size
6702 = TREE_INT_CST_LOW (DECL_SIZE
6703 (TREE_OPERAND
6704 (TREE_OPERAND (varop, 0), 1)));
6705 tree mask, unsigned_type;
6706 unsigned int precision;
6707 tree folded_compare;
6708
6709 /* First check whether the comparison would come out
6710 always the same. If we don't do that we would
6711 change the meaning with the masking. */
6712 if (constopnum == 0)
6713 folded_compare = fold (build (code, type, constop,
6714 TREE_OPERAND (varop, 0)));
6715 else
6716 folded_compare = fold (build (code, type,
6717 TREE_OPERAND (varop, 0),
6718 constop));
6719 if (integer_zerop (folded_compare)
6720 || integer_onep (folded_compare))
6721 return omit_one_operand (type, folded_compare, varop);
6722
6723 unsigned_type = (*lang_hooks.types.type_for_size) (size, 1);
6724 precision = TYPE_PRECISION (unsigned_type);
6725 mask = build_int_2 (~0, ~0);
6726 TREE_TYPE (mask) = unsigned_type;
6727 force_fit_type (mask, 0);
6728 mask = const_binop (RSHIFT_EXPR, mask,
6729 size_int (precision - size), 0);
6730 newconst = fold (build (BIT_AND_EXPR,
6731 TREE_TYPE (varop), newconst,
6732 convert (TREE_TYPE (varop),
6733 mask)));
6734 }
6735
6736 t = build (code, type,
6737 (constopnum == 0) ? newconst : varop,
6738 (constopnum == 1) ? newconst : varop);
6739 return t;
6740 }
6741 }
6742 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
6743 {
6744 if (POINTER_TYPE_P (TREE_TYPE (varop))
6745 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
6746 && (code == EQ_EXPR || code == NE_EXPR)))
6747 {
6748 tree newconst
6749 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
6750 constop, TREE_OPERAND (varop, 1)));
6751
6752 /* Do not overwrite the current varop to be a predecrement;
6753 create a new node so that we won't confuse our caller who
6754 might create trees and throw them away, reusing the
6755 arguments that they passed to build. This shows up in
6756 the THEN or ELSE parts of ?: being postdecrements. */
6757 varop = build (PREDECREMENT_EXPR, TREE_TYPE (varop),
6758 TREE_OPERAND (varop, 0),
6759 TREE_OPERAND (varop, 1));
6760
6761 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
6762 && DECL_BIT_FIELD (TREE_OPERAND
6763 (TREE_OPERAND (varop, 0), 1)))
6764 {
6765 int size
6766 = TREE_INT_CST_LOW (DECL_SIZE
6767 (TREE_OPERAND
6768 (TREE_OPERAND (varop, 0), 1)));
6769 tree mask, unsigned_type;
6770 unsigned int precision;
6771 tree folded_compare;
6772
6773 if (constopnum == 0)
6774 folded_compare = fold (build (code, type, constop,
6775 TREE_OPERAND (varop, 0)));
6776 else
6777 folded_compare = fold (build (code, type,
6778 TREE_OPERAND (varop, 0),
6779 constop));
6780 if (integer_zerop (folded_compare)
6781 || integer_onep (folded_compare))
6782 return omit_one_operand (type, folded_compare, varop);
6783
6784 unsigned_type = (*lang_hooks.types.type_for_size) (size, 1);
6785 precision = TYPE_PRECISION (unsigned_type);
6786 mask = build_int_2 (~0, ~0);
6787 TREE_TYPE (mask) = unsigned_type;
6788 force_fit_type (mask, 0);
6789 mask = const_binop (RSHIFT_EXPR, mask,
6790 size_int (precision - size), 0);
6791 newconst = fold (build (BIT_AND_EXPR,
6792 TREE_TYPE (varop), newconst,
6793 convert (TREE_TYPE (varop),
6794 mask)));
6795 }
6796
6797 t = build (code, type,
6798 (constopnum == 0) ? newconst : varop,
6799 (constopnum == 1) ? newconst : varop);
6800 return t;
6801 }
6802 }
6803 }
6804
6805 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
6806 This transformation produces the forms handled by the later
6807 optimizations involving comparisons with non-negative constants. */
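/* For instance, x >= 1 becomes x > 0, and x < 1 becomes x <= 0,
which are the forms the optimizations below look for. */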
6808 if (TREE_CODE (arg1) == INTEGER_CST
6809 && TREE_CODE (arg0) != INTEGER_CST
6810 && tree_int_cst_sgn (arg1) > 0)
6811 {
6812 switch (code)
6813 {
6814 case GE_EXPR:
6815 code = GT_EXPR;
6816 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6817 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6818 break;
6819
6820 case LT_EXPR:
6821 code = LE_EXPR;
6822 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6823 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6824 break;
6825
6826 default:
6827 break;
6828 }
6829 }
6830
6831 /* Comparisons with the highest or lowest possible integer of
6832 the specified size will have known values. */
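/* For instance, for an 8-bit unsigned X: X > 255 is always false,
X <= 255 is always true, X >= 255 collapses to X == 255, and
X < 255 collapses to X != 255. */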
6833 {
6834 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
6835
6836 if (TREE_CODE (arg1) == INTEGER_CST
6837 && ! TREE_CONSTANT_OVERFLOW (arg1)
6838 && width <= HOST_BITS_PER_WIDE_INT
6839 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
6840 || POINTER_TYPE_P (TREE_TYPE (arg1))))
6841 {
6842 unsigned HOST_WIDE_INT signed_max;
6843 unsigned HOST_WIDE_INT max, min;
6844
6845 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
6846
6847 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
6848 {
6849 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
6850 min = 0;
6851 }
6852 else
6853 {
6854 max = signed_max;
6855 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
6856 }
6857
6858 if (TREE_INT_CST_HIGH (arg1) == 0
6859 && TREE_INT_CST_LOW (arg1) == max)
6860 switch (code)
6861 {
6862 case GT_EXPR:
6863 return omit_one_operand (type,
6864 convert (type, integer_zero_node),
6865 arg0);
6866 case GE_EXPR:
6867 code = EQ_EXPR;
6868 TREE_SET_CODE (t, EQ_EXPR);
6869 break;
6870 case LE_EXPR:
6871 return omit_one_operand (type,
6872 convert (type, integer_one_node),
6873 arg0);
6874 case LT_EXPR:
6875 code = NE_EXPR;
6876 TREE_SET_CODE (t, NE_EXPR);
6877 break;
6878
6879 /* The GE_EXPR and LT_EXPR cases above are not normally
6880 reached because of previous transformations. */
6881
6882 default:
6883 break;
6884 }
6885 else if (TREE_INT_CST_HIGH (arg1) == 0
6886 && TREE_INT_CST_LOW (arg1) == max - 1)
6887 switch (code)
6888 {
6889 case GT_EXPR:
6890 code = EQ_EXPR;
6891 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
6892 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6893 break;
6894 case LE_EXPR:
6895 code = NE_EXPR;
6896 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
6897 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6898 break;
6899 default:
6900 break;
6901 }
6902 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
6903 && TREE_INT_CST_LOW (arg1) == min)
6904 switch (code)
6905 {
6906 case LT_EXPR:
6907 return omit_one_operand (type,
6908 convert (type, integer_zero_node),
6909 arg0);
6910 case LE_EXPR:
6911 code = EQ_EXPR;
6912 TREE_SET_CODE (t, EQ_EXPR);
6913 break;
6914
6915 case GE_EXPR:
6916 return omit_one_operand (type,
6917 convert (type, integer_one_node),
6918 arg0);
6919 case GT_EXPR:
6920 code = NE_EXPR;
6921 TREE_SET_CODE (t, NE_EXPR);
6922 break;
6923
6924 default:
6925 break;
6926 }
6927 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
6928 && TREE_INT_CST_LOW (arg1) == min + 1)
6929 switch (code)
6930 {
6931 case GE_EXPR:
6932 code = NE_EXPR;
6933 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6934 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6935 break;
6936 case LT_EXPR:
6937 code = EQ_EXPR;
6938 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6939 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6940 break;
6941 default:
6942 break;
6943 }
6944
6945 else if (TREE_INT_CST_HIGH (arg1) == 0
6946 && TREE_INT_CST_LOW (arg1) == signed_max
6947 && TREE_UNSIGNED (TREE_TYPE (arg1))
6948 /* signed_type does not work on pointer types. */
6949 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
6950 {
6951 /* The following case also applies to X < signed_max+1
6952 and X >= signed_max+1 because of previous transformations. */
6953 if (code == LE_EXPR || code == GT_EXPR)
6954 {
6955 tree st0, st1;
6956 st0 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg0));
6957 st1 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg1));
6958 return fold
6959 (build (code == LE_EXPR ? GE_EXPR: LT_EXPR,
6960 type, convert (st0, arg0),
6961 convert (st1, integer_zero_node)));
6962 }
6963 }
6964 }
6965 }
6966
6967 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
6968 a MINUS_EXPR of a constant, we can convert it into a comparison with
6969 a revised constant as long as no overflow occurs. */
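/* For instance, x + 3 == 7 becomes x == 4 and x - 5 != 2 becomes
x != 7, provided the adjusted constant does not overflow. */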
6970 if ((code == EQ_EXPR || code == NE_EXPR)
6971 && TREE_CODE (arg1) == INTEGER_CST
6972 && (TREE_CODE (arg0) == PLUS_EXPR
6973 || TREE_CODE (arg0) == MINUS_EXPR)
6974 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6975 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
6976 ? MINUS_EXPR : PLUS_EXPR,
6977 arg1, TREE_OPERAND (arg0, 1), 0))
6978 && ! TREE_CONSTANT_OVERFLOW (tem))
6979 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6980
6981 /* Similarly for a NEGATE_EXPR. */
6982 else if ((code == EQ_EXPR || code == NE_EXPR)
6983 && TREE_CODE (arg0) == NEGATE_EXPR
6984 && TREE_CODE (arg1) == INTEGER_CST
6985 && 0 != (tem = negate_expr (arg1))
6986 && TREE_CODE (tem) == INTEGER_CST
6987 && ! TREE_CONSTANT_OVERFLOW (tem))
6988 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6989
6990 /* If we have X - Y == 0, we can convert that to X == Y and similarly
6991 for !=. Don't do this for ordered comparisons due to overflow. */
6992 else if ((code == NE_EXPR || code == EQ_EXPR)
6993 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
6994 return fold (build (code, type,
6995 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
6996
6997 /* If we are widening one operand of an integer comparison,
6998 see if the other operand is similarly being widened. Perhaps we
6999 can do the comparison in the narrower type. */
7000 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
7001 && TREE_CODE (arg0) == NOP_EXPR
7002 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
7003 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
7004 && (TREE_TYPE (t1) == TREE_TYPE (tem)
7005 || (TREE_CODE (t1) == INTEGER_CST
7006 && int_fits_type_p (t1, TREE_TYPE (tem)))))
7007 return fold (build (code, type, tem, convert (TREE_TYPE (tem), t1)));
7008
7009 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
7010 constant, we can simplify it. */
7011 else if (TREE_CODE (arg1) == INTEGER_CST
7012 && (TREE_CODE (arg0) == MIN_EXPR
7013 || TREE_CODE (arg0) == MAX_EXPR)
7014 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7015 return optimize_minmax_comparison (t);
7016
7017 /* If we are comparing an ABS_EXPR with a constant, we can
7018 convert all the cases into explicit comparisons, but they may
7019 well not be faster than doing the ABS and one comparison.
7020 But ABS (X) <= C is a range comparison, which becomes a subtraction
7021 and a comparison, and is probably faster. */
7022 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7023 && TREE_CODE (arg0) == ABS_EXPR
7024 && ! TREE_SIDE_EFFECTS (arg0)
7025 && (0 != (tem = negate_expr (arg1)))
7026 && TREE_CODE (tem) == INTEGER_CST
7027 && ! TREE_CONSTANT_OVERFLOW (tem))
7028 return fold (build (TRUTH_ANDIF_EXPR, type,
7029 build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
7030 build (LE_EXPR, type,
7031 TREE_OPERAND (arg0, 0), arg1)));
7032
7033 /* If this is an EQ or NE comparison with zero and ARG0 is
7034 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
7035 two operations, but the latter can be done in one less insn
7036 on machines that have only two-operand insns or on which a
7037 constant cannot be the first operand. */
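/* Both forms test whether bit FOO of BAR is set: the result of
either AND is zero exactly when that bit is clear. */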
7038 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
7039 && TREE_CODE (arg0) == BIT_AND_EXPR)
7040 {
7041 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
7042 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
7043 return
7044 fold (build (code, type,
7045 build (BIT_AND_EXPR, TREE_TYPE (arg0),
7046 build (RSHIFT_EXPR,
7047 TREE_TYPE (TREE_OPERAND (arg0, 0)),
7048 TREE_OPERAND (arg0, 1),
7049 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
7050 convert (TREE_TYPE (arg0),
7051 integer_one_node)),
7052 arg1));
7053 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
7054 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
7055 return
7056 fold (build (code, type,
7057 build (BIT_AND_EXPR, TREE_TYPE (arg0),
7058 build (RSHIFT_EXPR,
7059 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7060 TREE_OPERAND (arg0, 0),
7061 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
7062 convert (TREE_TYPE (arg0),
7063 integer_one_node)),
7064 arg1));
7065 }
7066
7067 /* If this is an NE or EQ comparison of zero against the result of a
7068 signed MOD operation whose second operand is a power of 2, make
7069 the MOD operation unsigned since it is simpler and equivalent. */
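/* For instance, for signed x, x % 4 == 0 becomes
(unsigned) x % 4 == 0: with a power-of-2 divisor the remainder
is zero exactly when the low-order bits are zero, regardless of
sign, and the unsigned form can be reduced to a mask test. */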
7070 if ((code == NE_EXPR || code == EQ_EXPR)
7071 && integer_zerop (arg1)
7072 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
7073 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
7074 || TREE_CODE (arg0) == CEIL_MOD_EXPR
7075 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
7076 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
7077 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7078 {
7079 tree newtype = (*lang_hooks.types.unsigned_type) (TREE_TYPE (arg0));
7080 tree newmod = build (TREE_CODE (arg0), newtype,
7081 convert (newtype, TREE_OPERAND (arg0, 0)),
7082 convert (newtype, TREE_OPERAND (arg0, 1)));
7083
7084 return build (code, type, newmod, convert (newtype, arg1));
7085 }
7086
7087 /* If this is an NE comparison of zero with an AND of one, remove the
7088 comparison since the AND will give the correct value. */
7089 if (code == NE_EXPR && integer_zerop (arg1)
7090 && TREE_CODE (arg0) == BIT_AND_EXPR
7091 && integer_onep (TREE_OPERAND (arg0, 1)))
7092 return convert (type, arg0);
7093
7094 /* If we have (A & C) == C where C is a power of 2, convert this into
7095 (A & C) != 0. Similarly for NE_EXPR. */
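/* Since C has only one bit set, A & C is either 0 or C, so
testing it for equality with C is the same as testing it for
being nonzero. */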
7096 if ((code == EQ_EXPR || code == NE_EXPR)
7097 && TREE_CODE (arg0) == BIT_AND_EXPR
7098 && integer_pow2p (TREE_OPERAND (arg0, 1))
7099 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
7100 return fold (build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
7101 arg0, integer_zero_node));
7102
7103 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
7104 2, then fold the expression into shifts and logical operations. */
7105 tem = fold_single_bit_test (code, arg0, arg1, type);
7106 if (tem)
7107 return tem;
7108
7109 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
7110 and similarly for >= into !=. */
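/* For unsigned X, X < (1 << Y) holds exactly when every bit of X
at or above position Y is clear, i.e. when X >> Y == 0. */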
7111 if ((code == LT_EXPR || code == GE_EXPR)
7112 && TREE_UNSIGNED (TREE_TYPE (arg0))
7113 && TREE_CODE (arg1) == LSHIFT_EXPR
7114 && integer_onep (TREE_OPERAND (arg1, 0)))
7115 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7116 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7117 TREE_OPERAND (arg1, 1)),
7118 convert (TREE_TYPE (arg0), integer_zero_node));
7119
7120 else if ((code == LT_EXPR || code == GE_EXPR)
7121 && TREE_UNSIGNED (TREE_TYPE (arg0))
7122 && (TREE_CODE (arg1) == NOP_EXPR
7123 || TREE_CODE (arg1) == CONVERT_EXPR)
7124 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
7125 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
7126 return
7127 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7128 convert (TREE_TYPE (arg0),
7129 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7130 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
7131 convert (TREE_TYPE (arg0), integer_zero_node));
7132
7133 /* Simplify comparison of something with itself. (For IEEE
7134 floating-point, we can only do some of these simplifications.) */
7135 if (operand_equal_p (arg0, arg1, 0))
7136 {
7137 switch (code)
7138 {
7139 case EQ_EXPR:
7140 case GE_EXPR:
7141 case LE_EXPR:
7142 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7143 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7144 return constant_boolean_node (1, type);
7145 code = EQ_EXPR;
7146 TREE_SET_CODE (t, code);
7147 break;
7148
7149 case NE_EXPR:
7150 /* For NE, we can only do this simplification if the operands
7151 are integer or we don't honor IEEE floating-point NaNs. */
7152 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
7153 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7154 break;
7155 /* ... fall through ... */
7156 case GT_EXPR:
7157 case LT_EXPR:
7158 return constant_boolean_node (0, type);
7159 default:
7160 abort ();
7161 }
7162 }
7163
7164 /* If we are comparing an expression that just has comparisons
7165 of two integer values, arithmetic expressions of those comparisons,
7166 and constants, we can simplify it. There are only three cases
7167 to check: the two values can either be equal, the first can be
7168 greater, or the second can be greater. Fold the expression for
7169 those three values. Since each value must be 0 or 1, we have
7170 eight possibilities, each of which corresponds to the constant 0
7171 or 1 or one of the six possible comparisons.
7172
7173 This handles common cases like (a > b) == 0 but also handles
7174 expressions like ((x > y) - (y > x)) > 0, which supposedly
7175 occur in macroized code. */
7176
7177 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
7178 {
7179 tree cval1 = 0, cval2 = 0;
7180 int save_p = 0;
7181
7182 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
7183 /* Don't handle degenerate cases here; they should already
7184 have been handled anyway. */
7185 && cval1 != 0 && cval2 != 0
7186 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
7187 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
7188 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
7189 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
7190 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
7191 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
7192 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
7193 {
7194 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
7195 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
7196
7197 /* We can't just pass T to eval_subst in case cval1 or cval2
7198 was the same as ARG1. */
7199
7200 tree high_result
7201 = fold (build (code, type,
7202 eval_subst (arg0, cval1, maxval, cval2, minval),
7203 arg1));
7204 tree equal_result
7205 = fold (build (code, type,
7206 eval_subst (arg0, cval1, maxval, cval2, maxval),
7207 arg1));
7208 tree low_result
7209 = fold (build (code, type,
7210 eval_subst (arg0, cval1, minval, cval2, maxval),
7211 arg1));
7212
7213 /* All three of these results should be 0 or 1. Confirm they
7214 are. Then use those values to select the proper code
7215 to use. */
7216
7217 if ((integer_zerop (high_result)
7218 || integer_onep (high_result))
7219 && (integer_zerop (equal_result)
7220 || integer_onep (equal_result))
7221 && (integer_zerop (low_result)
7222 || integer_onep (low_result)))
7223 {
7224 /* Make a 3-bit mask with the high-order bit being the
7225 value for `>', the next for `=', and the low for `<'. */
7226 switch ((integer_onep (high_result) * 4)
7227 + (integer_onep (equal_result) * 2)
7228 + integer_onep (low_result))
7229 {
7230 case 0:
7231 /* Always false. */
7232 return omit_one_operand (type, integer_zero_node, arg0);
7233 case 1:
7234 code = LT_EXPR;
7235 break;
7236 case 2:
7237 code = EQ_EXPR;
7238 break;
7239 case 3:
7240 code = LE_EXPR;
7241 break;
7242 case 4:
7243 code = GT_EXPR;
7244 break;
7245 case 5:
7246 code = NE_EXPR;
7247 break;
7248 case 6:
7249 code = GE_EXPR;
7250 break;
7251 case 7:
7252 /* Always true. */
7253 return omit_one_operand (type, integer_one_node, arg0);
7254 }
7255
7256 t = build (code, type, cval1, cval2);
7257 if (save_p)
7258 return save_expr (t);
7259 else
7260 return fold (t);
7261 }
7262 }
7263 }
7264
7265 /* If this is a comparison of a field, we may be able to simplify it. */
7266 if (((TREE_CODE (arg0) == COMPONENT_REF
7267 && (*lang_hooks.can_use_bit_fields_p) ())
7268 || TREE_CODE (arg0) == BIT_FIELD_REF)
7269 && (code == EQ_EXPR || code == NE_EXPR)
7270 /* Handle the constant case even without -O
7271 to make sure the warnings are given. */
7272 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
7273 {
7274 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
7275 return t1 ? t1 : t;
7276 }
7277
7278 /* If this is a comparison of complex values and either or both sides
7279 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
7280 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
7281 This may prevent needless evaluations. */
7282 if ((code == EQ_EXPR || code == NE_EXPR)
7283 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
7284 && (TREE_CODE (arg0) == COMPLEX_EXPR
7285 || TREE_CODE (arg1) == COMPLEX_EXPR
7286 || TREE_CODE (arg0) == COMPLEX_CST
7287 || TREE_CODE (arg1) == COMPLEX_CST))
7288 {
7289 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
7290 tree real0, imag0, real1, imag1;
7291
7292 arg0 = save_expr (arg0);
7293 arg1 = save_expr (arg1);
7294 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
7295 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
7296 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
7297 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
7298
7299 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
7300 : TRUTH_ORIF_EXPR),
7301 type,
7302 fold (build (code, type, real0, real1)),
7303 fold (build (code, type, imag0, imag1))));
7304 }
7305
7306 /* Optimize comparisons of strlen vs zero to a compare of the
7307 first character of the string vs zero. To wit,
7308 strlen(ptr) == 0 => *ptr == 0
7309 strlen(ptr) != 0 => *ptr != 0
7310 Other cases should reduce to one of these two (or a constant)
7311 due to the return value of strlen being unsigned. */
7312 if ((code == EQ_EXPR || code == NE_EXPR)
7313 && integer_zerop (arg1)
7314 && TREE_CODE (arg0) == CALL_EXPR
7315 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR)
7316 {
7317 tree fndecl = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7318 tree arglist;
7319
7320 if (TREE_CODE (fndecl) == FUNCTION_DECL
7321 && DECL_BUILT_IN (fndecl)
7322 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
7323 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
7324 && (arglist = TREE_OPERAND (arg0, 1))
7325 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
7326 && ! TREE_CHAIN (arglist))
7327 return fold (build (code, type,
7328 build1 (INDIRECT_REF, char_type_node,
7329 TREE_VALUE (arglist)),
7330 integer_zero_node));
7331 }
7332
7333 /* From here on, the only cases we handle are when the result is
7334 known to be a constant.
7335
7336 To compute GT, swap the arguments and do LT.
7337 To compute GE, do LT and invert the result.
7338 To compute LE, swap the arguments, do LT and invert the result.
7339 To compute NE, do EQ and invert the result.
7340
7341 Therefore, the code below must handle only EQ and LT. */
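/* For instance, GE is computed as LT with the result inverted:
3 >= 5 is evaluated as 3 < 5, giving 1, which is then inverted
to 0. */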
7342
7343 if (code == LE_EXPR || code == GT_EXPR)
7344 {
7345 tem = arg0, arg0 = arg1, arg1 = tem;
7346 code = swap_tree_comparison (code);
7347 }
7348
7349 /* Note that it is safe to invert for real values here because we
7350 will check below in the one case that it matters. */
7351
7352 t1 = NULL_TREE;
7353 invert = 0;
7354 if (code == NE_EXPR || code == GE_EXPR)
7355 {
7356 invert = 1;
7357 code = invert_tree_comparison (code);
7358 }
7359
7360 /* Compute a result for LT or EQ if args permit;
7361 otherwise return T. */
7362 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
7363 {
7364 if (code == EQ_EXPR)
7365 t1 = build_int_2 (tree_int_cst_equal (arg0, arg1), 0);
7366 else
7367 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
7368 ? INT_CST_LT_UNSIGNED (arg0, arg1)
7369 : INT_CST_LT (arg0, arg1)),
7370 0);
7371 }
7372
7373 #if 0 /* This is no longer useful, but breaks some real code. */
7374 /* Assume a nonexplicit constant cannot equal an explicit one,
7375 since such code would be undefined anyway.
7376 Exception: on sysvr4, using #pragma weak,
7377 a label can come out as 0. */
7378 else if (TREE_CODE (arg1) == INTEGER_CST
7379 && !integer_zerop (arg1)
7380 && TREE_CONSTANT (arg0)
7381 && TREE_CODE (arg0) == ADDR_EXPR
7382 && code == EQ_EXPR)
7383 t1 = build_int_2 (0, 0);
7384 #endif
7385 /* Two real constants can be compared explicitly. */
7386 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
7387 {
7388 /* If either operand is a NaN, the result is false with two
7389 exceptions: First, an NE_EXPR is true on NaNs, but that case
7390 is already handled correctly since we will be inverting the
7391 result for NE_EXPR. Second, if we had inverted a LE_EXPR
7392 or a GE_EXPR into a LT_EXPR, we must return true so that it
7393 will be inverted into false. */
7394
7395 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
7396 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
7397 t1 = build_int_2 (invert && code == LT_EXPR, 0);
7398
7399 else if (code == EQ_EXPR)
7400 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
7401 TREE_REAL_CST (arg1)),
7402 0);
7403 else
7404 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
7405 TREE_REAL_CST (arg1)),
7406 0);
7407 }
7408
7409 if (t1 == NULL_TREE)
7410 return t;
7411
7412 if (invert)
7413 TREE_INT_CST_LOW (t1) ^= 1;
7414
7415 TREE_TYPE (t1) = type;
7416 if (TREE_CODE (type) == BOOLEAN_TYPE)
7417 return (*lang_hooks.truthvalue_conversion) (t1);
7418 return t1;
7419
7420 case COND_EXPR:
7421 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
7422 so all simple results must be passed through pedantic_non_lvalue. */
7423 if (TREE_CODE (arg0) == INTEGER_CST)
7424 return pedantic_non_lvalue
7425 (TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
7426 else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
7427 return pedantic_omit_one_operand (type, arg1, arg0);
7428
7429 /* If the second operand is zero, invert the comparison and swap
7430 the second and third operands. Likewise if the second operand
7431 is constant and the third is not or if the third operand is
7432 equivalent to the first operand of the comparison. */
7433
7434 if (integer_zerop (arg1)
7435 || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
7436 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
7437 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
7438 TREE_OPERAND (t, 2),
7439 TREE_OPERAND (arg0, 1))))
7440 {
7441 /* See if this can be inverted. If it can't, possibly because
7442 it was a floating-point inequality comparison, don't do
7443 anything. */
7444 tem = invert_truthvalue (arg0);
7445
7446 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
7447 {
7448 t = build (code, type, tem,
7449 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
7450 arg0 = tem;
7451 /* arg1 should be the first argument of the new T. */
7452 arg1 = TREE_OPERAND (t, 1);
7453 STRIP_NOPS (arg1);
7454 }
7455 }
7456
7457 /* If we have A op B ? A : C, we may be able to convert this to a
7458 simpler expression, depending on the operation and the values
7459 of B and C. Signed zeros prevent all of these transformations,
7460 for reasons given above each one. */
7461
7462 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
7463 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
7464 arg1, TREE_OPERAND (arg0, 1))
7465 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
7466 {
7467 tree arg2 = TREE_OPERAND (t, 2);
7468 enum tree_code comp_code = TREE_CODE (arg0);
7469
7470 STRIP_NOPS (arg2);
7471
7472 /* If we have A op 0 ? A : -A, consider applying the following
7473 transformations:
7474
7475 A == 0? A : -A same as -A
7476 A != 0? A : -A same as A
7477 A >= 0? A : -A same as abs (A)
7478 A > 0? A : -A same as abs (A)
7479 A <= 0? A : -A same as -abs (A)
7480 A < 0? A : -A same as -abs (A)
7481
7482 None of these transformations work for modes with signed
7483 zeros. If A is +/-0, the first two transformations will
7484 change the sign of the result (from +0 to -0, or vice
7485 versa). The last four will fix the sign of the result,
7486 even though the original expressions could be positive or
7487 negative, depending on the sign of A.
7488
7489 Note that all these transformations are correct if A is
7490 NaN, since the two alternatives (A and -A) are also NaNs. */
7491 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
7492 ? real_zerop (TREE_OPERAND (arg0, 1))
7493 : integer_zerop (TREE_OPERAND (arg0, 1)))
7494 && TREE_CODE (arg2) == NEGATE_EXPR
7495 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
7496 switch (comp_code)
7497 {
7498 case EQ_EXPR:
7499 return
7500 pedantic_non_lvalue
7501 (convert (type,
7502 negate_expr
7503 (convert (TREE_TYPE (TREE_OPERAND (t, 1)),
7504 arg1))));
7505 case NE_EXPR:
7506 return pedantic_non_lvalue (convert (type, arg1));
7507 case GE_EXPR:
7508 case GT_EXPR:
7509 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7510 arg1 = convert ((*lang_hooks.types.signed_type)
7511 (TREE_TYPE (arg1)), arg1);
7512 return pedantic_non_lvalue
7513 (convert (type, fold (build1 (ABS_EXPR,
7514 TREE_TYPE (arg1), arg1))));
7515 case LE_EXPR:
7516 case LT_EXPR:
7517 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7518 arg1 = convert ((*lang_hooks.types.signed_type)
7519 (TREE_TYPE (arg1)), arg1);
7520 return pedantic_non_lvalue
7521 (negate_expr (convert (type,
7522 fold (build1 (ABS_EXPR,
7523 TREE_TYPE (arg1),
7524 arg1)))));
7525 default:
7526 abort ();
7527 }
7528
7529 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
7530 A == 0 ? A : 0 is always 0 unless A is -0. Note that
7531 both transformations are correct when A is NaN: A != 0
7532 is then true, and A == 0 is false. */
7533
7534 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
7535 {
7536 if (comp_code == NE_EXPR)
7537 return pedantic_non_lvalue (convert (type, arg1));
7538 else if (comp_code == EQ_EXPR)
7539 return pedantic_non_lvalue (convert (type, integer_zero_node));
7540 }
7541
7542 /* Try some transformations of A op B ? A : B.
7543
7544 A == B? A : B same as B
7545 A != B? A : B same as A
7546 A >= B? A : B same as max (A, B)
7547 A > B? A : B same as max (B, A)
7548 A <= B? A : B same as min (A, B)
7549 A < B? A : B same as min (B, A)
7550
7551 As above, these transformations don't work in the presence
7552 of signed zeros. For example, if A and B are zeros of
7553 opposite sign, the first two transformations will change
7554 the sign of the result. In the last four, the original
7555 expressions give different results for (A=+0, B=-0) and
7556 (A=-0, B=+0), but the transformed expressions do not.
7557
7558 The first two transformations are correct if either A or B
7559 is a NaN. In the first transformation, the condition will
7560 be false, and B will indeed be chosen. In the case of the
7561 second transformation, the condition A != B will be true,
7562 and A will be chosen.
7563
7564 The conversions to max() and min() are not correct if B is
7565 a number and A is not. The conditions in the original
7566 expressions will be false, so all four give B. The min()
7567 and max() versions would give a NaN instead. */
7568 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
7569 arg2, TREE_OPERAND (arg0, 0)))
7570 {
7571 tree comp_op0 = TREE_OPERAND (arg0, 0);
7572 tree comp_op1 = TREE_OPERAND (arg0, 1);
7573 tree comp_type = TREE_TYPE (comp_op0);
7574
7575 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
7576 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
7577 {
7578 comp_type = type;
7579 comp_op0 = arg1;
7580 comp_op1 = arg2;
7581 }
7582
7583 switch (comp_code)
7584 {
7585 case EQ_EXPR:
7586 return pedantic_non_lvalue (convert (type, arg2));
7587 case NE_EXPR:
7588 return pedantic_non_lvalue (convert (type, arg1));
7589 case LE_EXPR:
7590 case LT_EXPR:
7591 /* In C++ a ?: expression can be an lvalue, so put first
7592 the operand that will be used if the two are equal, so
7593 that we can convert this back to the corresponding
7594 COND_EXPR. */
7595 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
7596 return pedantic_non_lvalue
7597 (convert (type, fold (build (MIN_EXPR, comp_type,
7598 (comp_code == LE_EXPR
7599 ? comp_op0 : comp_op1),
7600 (comp_code == LE_EXPR
7601 ? comp_op1 : comp_op0)))));
7602 break;
7603 case GE_EXPR:
7604 case GT_EXPR:
7605 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
7606 return pedantic_non_lvalue
7607 (convert (type, fold (build (MAX_EXPR, comp_type,
7608 (comp_code == GE_EXPR
7609 ? comp_op0 : comp_op1),
7610 (comp_code == GE_EXPR
7611 ? comp_op1 : comp_op0)))));
7612 break;
7613 default:
7614 abort ();
7615 }
7616 }
7617
7618 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
7619 we might still be able to simplify this. For example,
7620 if C1 is one less or one more than C2, this might have started
7621 out as a MIN or MAX and been transformed by this function.
7622 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
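/* For instance, x < 3 ? x : 2 is min (x, 2): C1 (3) is C2 (2)
plus one, which is the LT_EXPR case below. */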
7623
7624 if (INTEGRAL_TYPE_P (type)
7625 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7626 && TREE_CODE (arg2) == INTEGER_CST)
7627 switch (comp_code)
7628 {
7629 case EQ_EXPR:
7630 /* We can replace A with C1 in this case. */
7631 arg1 = convert (type, TREE_OPERAND (arg0, 1));
7632 t = build (code, type, TREE_OPERAND (t, 0), arg1,
7633 TREE_OPERAND (t, 2));
7634 break;
7635
7636 case LT_EXPR:
7637 /* If C1 is C2 + 1, this is min(A, C2). */
7638 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
7639 && operand_equal_p (TREE_OPERAND (arg0, 1),
7640 const_binop (PLUS_EXPR, arg2,
7641 integer_one_node, 0), 1))
7642 return pedantic_non_lvalue
7643 (fold (build (MIN_EXPR, type, arg1, arg2)));
7644 break;
7645
7646 case LE_EXPR:
7647 /* If C1 is C2 - 1, this is min(A, C2). */
7648 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
7649 && operand_equal_p (TREE_OPERAND (arg0, 1),
7650 const_binop (MINUS_EXPR, arg2,
7651 integer_one_node, 0), 1))
7652 return pedantic_non_lvalue
7653 (fold (build (MIN_EXPR, type, arg1, arg2)));
7654 break;
7655
7656 case GT_EXPR:
7657 /* If C1 is C2 - 1, this is max(A, C2). */
7658 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
7659 && operand_equal_p (TREE_OPERAND (arg0, 1),
7660 const_binop (MINUS_EXPR, arg2,
7661 integer_one_node, 0), 1))
7662 return pedantic_non_lvalue
7663 (fold (build (MAX_EXPR, type, arg1, arg2)));
7664 break;
7665
7666 case GE_EXPR:
7667 /* If C1 is C2 + 1, this is max(A, C2). */
7668 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
7669 && operand_equal_p (TREE_OPERAND (arg0, 1),
7670 const_binop (PLUS_EXPR, arg2,
7671 integer_one_node, 0), 1))
7672 return pedantic_non_lvalue
7673 (fold (build (MAX_EXPR, type, arg1, arg2)));
7674 break;
7675 case NE_EXPR:
7676 break;
7677 default:
7678 abort ();
7679 }
7680 }
7681
7682 /* If the second operand is simpler than the third, swap them
7683 since that produces better jump optimization results. */
7684 if ((TREE_CONSTANT (arg1) || DECL_P (arg1)
7685 || TREE_CODE (arg1) == SAVE_EXPR)
7686 && ! (TREE_CONSTANT (TREE_OPERAND (t, 2))
7687 || DECL_P (TREE_OPERAND (t, 2))
7688 || TREE_CODE (TREE_OPERAND (t, 2)) == SAVE_EXPR))
7689 {
7690 /* See if this can be inverted. If it can't, possibly because
7691 it was a floating-point inequality comparison, don't do
7692 anything. */
7693 tem = invert_truthvalue (arg0);
7694
7695 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
7696 {
7697 t = build (code, type, tem,
7698 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
7699 arg0 = tem;
7700 /* arg1 should be the first argument of the new T. */
7701 arg1 = TREE_OPERAND (t, 1);
7702 STRIP_NOPS (arg1);
7703 }
7704 }
7705
7706 /* Convert A ? 1 : 0 to simply A. */
7707 if (integer_onep (TREE_OPERAND (t, 1))
7708 && integer_zerop (TREE_OPERAND (t, 2))
7709 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
7710 call to fold will try to move the conversion inside
7711 a COND, which will recurse. In that case, the COND_EXPR
7712 is probably the best choice, so leave it alone. */
7713 && type == TREE_TYPE (arg0))
7714 return pedantic_non_lvalue (arg0);
7715
7716 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
7717 over COND_EXPR in cases such as floating point comparisons. */
7718 if (integer_zerop (TREE_OPERAND (t, 1))
7719 && integer_onep (TREE_OPERAND (t, 2))
7720 && truth_value_p (TREE_CODE (arg0)))
7721 return pedantic_non_lvalue (convert (type,
7722 invert_truthvalue (arg0)));
7723
7724 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
7725 operation is simply A & 2. */
7726
7727 if (integer_zerop (TREE_OPERAND (t, 2))
7728 && TREE_CODE (arg0) == NE_EXPR
7729 && integer_zerop (TREE_OPERAND (arg0, 1))
7730 && integer_pow2p (arg1)
7731 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
7732 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
7733 arg1, 1))
7734 return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));
7735
7736 /* Convert A ? B : 0 into A && B if A and B are truth values. */
7737 if (integer_zerop (TREE_OPERAND (t, 2))
7738 && truth_value_p (TREE_CODE (arg0))
7739 && truth_value_p (TREE_CODE (arg1)))
7740 return pedantic_non_lvalue (fold (build (TRUTH_ANDIF_EXPR, type,
7741 arg0, arg1)));
7742
7743 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
7744 if (integer_onep (TREE_OPERAND (t, 2))
7745 && truth_value_p (TREE_CODE (arg0))
7746 && truth_value_p (TREE_CODE (arg1)))
7747 {
7748 /* Only perform transformation if ARG0 is easily inverted. */
7749 tem = invert_truthvalue (arg0);
7750 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
7751 return pedantic_non_lvalue (fold (build (TRUTH_ORIF_EXPR, type,
7752 tem, arg1)));
7753 }
7754
7755 return t;
7756
7757 case COMPOUND_EXPR:
7758 /* When pedantic, a compound expression can be neither an lvalue
7759 nor an integer constant expression. */
7760 if (TREE_SIDE_EFFECTS (arg0) || pedantic)
7761 return t;
7762 /* Don't let (0, 0) be a null pointer constant. */
7763 if (integer_zerop (arg1))
7764 return build1 (NOP_EXPR, type, arg1);
7765 return convert (type, arg1);
7766
7767 case COMPLEX_EXPR:
7768 if (wins)
7769 return build_complex (type, arg0, arg1);
7770 return t;
7771
7772 case REALPART_EXPR:
7773 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7774 return t;
7775 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7776 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7777 TREE_OPERAND (arg0, 1));
7778 else if (TREE_CODE (arg0) == COMPLEX_CST)
7779 return TREE_REALPART (arg0);
7780 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7781 return fold (build (TREE_CODE (arg0), type,
7782 fold (build1 (REALPART_EXPR, type,
7783 TREE_OPERAND (arg0, 0))),
7784 fold (build1 (REALPART_EXPR,
7785 type, TREE_OPERAND (arg0, 1)))));
7786 return t;
7787
7788 case IMAGPART_EXPR:
7789 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7790 return convert (type, integer_zero_node);
7791 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7792 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7793 TREE_OPERAND (arg0, 0));
7794 else if (TREE_CODE (arg0) == COMPLEX_CST)
7795 return TREE_IMAGPART (arg0);
7796 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7797 return fold (build (TREE_CODE (arg0), type,
7798 fold (build1 (IMAGPART_EXPR, type,
7799 TREE_OPERAND (arg0, 0))),
7800 fold (build1 (IMAGPART_EXPR, type,
7801 TREE_OPERAND (arg0, 1)))));
7802 return t;
7803
7804 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
7805 appropriate. */
7806 case CLEANUP_POINT_EXPR:
7807 if (! has_cleanups (arg0))
7808 return TREE_OPERAND (t, 0);
7809
7810 {
7811 enum tree_code code0 = TREE_CODE (arg0);
7812 int kind0 = TREE_CODE_CLASS (code0);
7813 tree arg00 = TREE_OPERAND (arg0, 0);
7814 tree arg01;
7815
7816 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
7817 return fold (build1 (code0, type,
7818 fold (build1 (CLEANUP_POINT_EXPR,
7819 TREE_TYPE (arg00), arg00))));
7820
7821 if (kind0 == '<' || kind0 == '2'
7822 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
7823 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
7824 || code0 == TRUTH_XOR_EXPR)
7825 {
7826 arg01 = TREE_OPERAND (arg0, 1);
7827
7828 if (TREE_CONSTANT (arg00)
7829 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
7830 && ! has_cleanups (arg00)))
7831 return fold (build (code0, type, arg00,
7832 fold (build1 (CLEANUP_POINT_EXPR,
7833 TREE_TYPE (arg01), arg01))));
7834
7835 if (TREE_CONSTANT (arg01))
7836 return fold (build (code0, type,
7837 fold (build1 (CLEANUP_POINT_EXPR,
7838 TREE_TYPE (arg00), arg00)),
7839 arg01));
7840 }
7841
7842 return t;
7843 }
7844
7845 case CALL_EXPR:
7846 /* Check for a built-in function. */
7847 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
7848 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (expr, 0), 0))
7849 == FUNCTION_DECL)
7850 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
7851 {
7852 tree tmp = fold_builtin (expr);
7853 if (tmp)
7854 return tmp;
7855 }
7856 return t;
7857
7858 default:
7859 return t;
7860 } /* switch (code) */
7861 }
7862
7863 /* Determine if the first argument is a multiple of the second argument.
7864 Return 0 if it is not, or if we cannot easily determine it to be.
7865
7866 An example of the sort of thing we care about (at this point; this routine
7867 could surely be made more general, and expanded to do what the *_DIV_EXPR's
7868 fold cases do now) is discovering that
7869
7870 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
7871
7872 is a multiple of
7873
7874 SAVE_EXPR (J * 8)
7875
7876 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
7877
7878 This code also handles discovering that
7879
7880 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
7881
7882 is a multiple of 8 so we don't have to worry about dealing with a
7883 possible remainder.
7884
7885 Note that we *look* inside a SAVE_EXPR only to determine how it was
7886 calculated; it is not safe for fold to do much of anything else with the
7887 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
7888 at run time. For example, the latter example above *cannot* be implemented
7889 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
7890 evaluation time of the original SAVE_EXPR is not necessarily the same at
7891 the time the new expression is evaluated. The only optimization of this
7892 sort that would be valid is changing
7893
7894 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
7895
7896 divided by 8 to
7897
7898 SAVE_EXPR (I) * SAVE_EXPR (J)
7899
7900 (where the same SAVE_EXPR (J) is used in the original and the
7901 transformed version). */
7902
7903 static int
7904 multiple_of_p (tree type, tree top, tree bottom)
7905 {
7906 if (operand_equal_p (top, bottom, 0))
7907 return 1;
7908
7909 if (TREE_CODE (type) != INTEGER_TYPE)
7910 return 0;
7911
7912 switch (TREE_CODE (top))
7913 {
7914 case MULT_EXPR:
7915 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
7916 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
7917
7918 case PLUS_EXPR:
7919 case MINUS_EXPR:
7920 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
7921 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
7922
7923 case LSHIFT_EXPR:
7924 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
7925 {
7926 tree op1, t1;
7927
7928 op1 = TREE_OPERAND (top, 1);
7929 /* const_binop may not detect overflow correctly,
7930 so check for it explicitly here. */
7931 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
7932 > TREE_INT_CST_LOW (op1)
7933 && TREE_INT_CST_HIGH (op1) == 0
7934 && 0 != (t1 = convert (type,
7935 const_binop (LSHIFT_EXPR, size_one_node,
7936 op1, 0)))
7937 && ! TREE_OVERFLOW (t1))
7938 return multiple_of_p (type, t1, bottom);
7939 }
7940 return 0;
7941
7942 case NOP_EXPR:
7943 /* Can't handle conversions from non-integral or wider integral type. */
7944 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
7945 || (TYPE_PRECISION (type)
7946 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
7947 return 0;
7948
7949 /* ... fall through ... */
7950
7951 case SAVE_EXPR:
7952 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
7953
7954 case INTEGER_CST:
7955 if (TREE_CODE (bottom) != INTEGER_CST
7956 || (TREE_UNSIGNED (type)
7957 && (tree_int_cst_sgn (top) < 0
7958 || tree_int_cst_sgn (bottom) < 0)))
7959 return 0;
7960 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
7961 top, bottom, 0));
7962
7963 default:
7964 return 0;
7965 }
7966 }
7967
7968 /* Return true if `t' is known to be non-negative. */
7969
7970 int
7971 tree_expr_nonnegative_p (tree t)
7972 {
7973 switch (TREE_CODE (t))
7974 {
7975 case ABS_EXPR:
7976 case FFS_EXPR:
7977 case POPCOUNT_EXPR:
7978 case PARITY_EXPR:
7979 return 1;
7980
7981 case CLZ_EXPR:
7982 case CTZ_EXPR:
7983 /* These are undefined at zero. This is true even if
7984 C[LT]Z_DEFINED_VALUE_AT_ZERO is set, since what we're
7985 computing here is a user-visible property. */
7986 return 0;
7987
7988 case INTEGER_CST:
7989 return tree_int_cst_sgn (t) >= 0;
7990
7991 case REAL_CST:
7992 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
7993
7994 case PLUS_EXPR:
7995 if (FLOAT_TYPE_P (TREE_TYPE (t)))
7996 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
7997 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7998
7999 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
8000 both unsigned and at least 2 bits shorter than the result. */
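/* For instance, two 8-bit unsigned values zero-extended and added
in a 32-bit type need at most 9 bits, so the sign bit of the
result cannot be set. */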
8001 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8002 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8003 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8004 {
8005 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8006 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8007 if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
8008 && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
8009 {
8010 unsigned int prec = MAX (TYPE_PRECISION (inner1),
8011 TYPE_PRECISION (inner2)) + 1;
8012 return prec < TYPE_PRECISION (TREE_TYPE (t));
8013 }
8014 }
8015 break;
8016
8017 case MULT_EXPR:
8018 if (FLOAT_TYPE_P (TREE_TYPE (t)))
8019 {
8020 /* x * x for floating point x is always non-negative. */
8021 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
8022 return 1;
8023 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8024 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8025 }
8026
8027 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
8028 both unsigned and the sum of their precisions is less than the result's. */
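/* For instance, the product of two 8-bit unsigned values computed
in a 32-bit type needs at most 16 bits, so its sign bit is
clear. */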
8029 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8030 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8031 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8032 {
8033 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8034 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8035 if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
8036 && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
8037 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
8038 < TYPE_PRECISION (TREE_TYPE (t));
8039 }
8040 return 0;
8041
8042 case TRUNC_DIV_EXPR:
8043 case CEIL_DIV_EXPR:
8044 case FLOOR_DIV_EXPR:
8045 case ROUND_DIV_EXPR:
8046 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8047 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8048
8049 case TRUNC_MOD_EXPR:
8050 case CEIL_MOD_EXPR:
8051 case FLOOR_MOD_EXPR:
8052 case ROUND_MOD_EXPR:
8053 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8054
8055 case RDIV_EXPR:
8056 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8057 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8058
8059 case NOP_EXPR:
8060 {
8061 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
8062 tree outer_type = TREE_TYPE (t);
8063
8064 if (TREE_CODE (outer_type) == REAL_TYPE)
8065 {
8066 if (TREE_CODE (inner_type) == REAL_TYPE)
8067 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8068 if (TREE_CODE (inner_type) == INTEGER_TYPE)
8069 {
8070 if (TREE_UNSIGNED (inner_type))
8071 return 1;
8072 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8073 }
8074 }
8075 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
8076 {
8077 if (TREE_CODE (inner_type) == REAL_TYPE)
8078 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8079 if (TREE_CODE (inner_type) == INTEGER_TYPE)
8080 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
8081 && TREE_UNSIGNED (inner_type);
8082 }
8083 }
8084 break;
8085
8086 case COND_EXPR:
8087 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
8088 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
8089 case COMPOUND_EXPR:
8090 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8091 case MIN_EXPR:
8092 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8093 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8094 case MAX_EXPR:
8095 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8096 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8097 case MODIFY_EXPR:
8098 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8099 case BIND_EXPR:
8100 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8101 case SAVE_EXPR:
8102 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8103 case NON_LVALUE_EXPR:
8104 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8105 case FLOAT_EXPR:
8106 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8107 case RTL_EXPR:
8108 return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));
8109
8110 case CALL_EXPR:
8111 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
8112 {
8113 tree fndecl = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
8114 tree arglist = TREE_OPERAND (t, 1);
8115 if (TREE_CODE (fndecl) == FUNCTION_DECL
8116 && DECL_BUILT_IN (fndecl)
8117 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
8118 switch (DECL_FUNCTION_CODE (fndecl))
8119 {
8120 case BUILT_IN_CABS:
8121 case BUILT_IN_CABSL:
8122 case BUILT_IN_CABSF:
8123 case BUILT_IN_EXP:
8124 case BUILT_IN_EXPF:
8125 case BUILT_IN_EXPL:
8126 case BUILT_IN_FABS:
8127 case BUILT_IN_FABSF:
8128 case BUILT_IN_FABSL:
8129 case BUILT_IN_SQRT:
8130 case BUILT_IN_SQRTF:
8131 case BUILT_IN_SQRTL:
8132 return 1;
8133
8134 case BUILT_IN_ATAN:
8135 case BUILT_IN_ATANF:
8136 case BUILT_IN_ATANL:
8137 case BUILT_IN_CEIL:
8138 case BUILT_IN_CEILF:
8139 case BUILT_IN_CEILL:
8140 case BUILT_IN_FLOOR:
8141 case BUILT_IN_FLOORF:
8142 case BUILT_IN_FLOORL:
8143 case BUILT_IN_NEARBYINT:
8144 case BUILT_IN_NEARBYINTF:
8145 case BUILT_IN_NEARBYINTL:
8146 case BUILT_IN_ROUND:
8147 case BUILT_IN_ROUNDF:
8148 case BUILT_IN_ROUNDL:
8149 case BUILT_IN_TRUNC:
8150 case BUILT_IN_TRUNCF:
8151 case BUILT_IN_TRUNCL:
8152 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
8153
8154 case BUILT_IN_POW:
8155 case BUILT_IN_POWF:
8156 case BUILT_IN_POWL:
8157 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
8158
8159 default:
8160 break;
8161 }
8162 }
8163
8164 /* ... fall through ... */
8165
8166 default:
8167 if (truth_value_p (TREE_CODE (t)))
8168 /* Truth values evaluate to 0 or 1, which is nonnegative. */
8169 return 1;
8170 }
8171
8172 /* We don't know the sign of `t', so be conservative and return false. */
8173 return 0;
8174 }
8175
8176 /* Return true if `r' is known to be non-negative.
8177 Only handles constants at the moment. */
8178
8179 int
8180 rtl_expr_nonnegative_p (rtx r)
8181 {
8182 switch (GET_CODE (r))
8183 {
8184 case CONST_INT:
8185 return INTVAL (r) >= 0;
8186
8187 case CONST_DOUBLE:
8188 if (GET_MODE (r) == VOIDmode)
8189 return CONST_DOUBLE_HIGH (r) >= 0;
8190 return 0;
8191
8192 case CONST_VECTOR:
8193 {
8194 int units, i;
8195 rtx elt;
8196
8197 units = CONST_VECTOR_NUNITS (r);
8198
8199 for (i = 0; i < units; ++i)
8200 {
8201 elt = CONST_VECTOR_ELT (r, i);
8202 if (!rtl_expr_nonnegative_p (elt))
8203 return 0;
8204 }
8205
8206 return 1;
8207 }
8208
8209 case SYMBOL_REF:
8210 case LABEL_REF:
8211 /* These are always nonnegative. */
8212 return 1;
8213
8214 default:
8215 return 0;
8216 }
8217 }
8218
8219 #include "gt-fold-const.h"