fold-const.c (make_range): Do not access operand 1 for a zero-operand operator.
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
29
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type.
32
33 fold takes a tree as argument and returns a simplified tree.
34
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
38
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
41
42 force_fit_type takes a constant and prior overflow indicator, and
43 forces the value to fit the type. It returns an overflow indicator. */
44
45 #include "config.h"
46 #include "system.h"
47 #include "coretypes.h"
48 #include "tm.h"
49 #include "flags.h"
50 #include "tree.h"
51 #include "real.h"
52 #include "rtl.h"
53 #include "expr.h"
54 #include "tm_p.h"
55 #include "toplev.h"
56 #include "ggc.h"
57 #include "hashtab.h"
58 #include "langhooks.h"
59
60 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
61 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
62 static bool negate_expr_p (tree);
63 static tree negate_expr (tree);
64 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
65 static tree associate_trees (tree, tree, enum tree_code, tree);
66 static tree int_const_binop (enum tree_code, tree, tree, int);
67 static tree const_binop (enum tree_code, tree, tree, int);
68 static hashval_t size_htab_hash (const void *);
69 static int size_htab_eq (const void *, const void *);
70 static tree fold_convert (tree, tree);
71 static enum tree_code invert_tree_comparison (enum tree_code);
72 static enum tree_code swap_tree_comparison (enum tree_code);
73 static int comparison_to_compcode (enum tree_code);
74 static enum tree_code compcode_to_comparison (int);
75 static int truth_value_p (enum tree_code);
76 static int operand_equal_for_comparison_p (tree, tree, tree);
77 static int twoval_comparison_p (tree, tree *, tree *, int *);
78 static tree eval_subst (tree, tree, tree, tree, tree);
79 static tree pedantic_omit_one_operand (tree, tree, tree);
80 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
81 static tree make_bit_field_ref (tree, tree, int, int, int);
82 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
83 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
84 enum machine_mode *, int *, int *,
85 tree *, tree *);
86 static int all_ones_mask_p (tree, int);
87 static tree sign_bit_p (tree, tree);
88 static int simple_operand_p (tree);
89 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
90 static tree make_range (tree, int *, tree *, tree *);
91 static tree build_range_check (tree, tree, int, tree, tree);
92 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
93 tree);
94 static tree fold_range_test (tree);
95 static tree unextend (tree, int, int, tree);
96 static tree fold_truthop (enum tree_code, tree, tree, tree);
97 static tree optimize_minmax_comparison (tree);
98 static tree extract_muldiv (tree, tree, enum tree_code, tree);
99 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
100 static tree strip_compound_expr (tree, tree);
101 static int multiple_of_p (tree, tree, tree);
102 static tree constant_boolean_node (int, tree);
103 static int count_cond (tree, int);
104 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
105 tree, int);
106 static bool fold_real_zero_addition_p (tree, tree, int);
107 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
108 tree, tree, tree);
109 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
110
111 /* The following constants represent a bit based encoding of GCC's
112 comparison operators. This encoding simplifies transformations
113 on relational comparison operators, such as AND and OR. */
114 #define COMPCODE_FALSE 0
115 #define COMPCODE_LT 1
116 #define COMPCODE_EQ 2
117 #define COMPCODE_LE 3
118 #define COMPCODE_GT 4
119 #define COMPCODE_NE 5
120 #define COMPCODE_GE 6
121 #define COMPCODE_TRUE 7
122
123 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
124 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
125 and SUM1. Then this yields nonzero if overflow occurred during the
126 addition.
127
128 Overflow occurs if A and B have the same sign, but A and SUM differ in
129 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
130 sign. */
131 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
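/* Editor's illustrative sketch, not part of the original source: a
   self-contained demonstration of the OVERFLOW_SUM_SIGN test, assuming a
   32-bit int purely for the worked example.  With a = b = 0x7fffffff the
   wrapped sum is negative, so A and B agree in sign but differ from SUM,
   and the macro reports overflow.  The sum is computed through unsigned
   arithmetic so the sketch itself avoids undefined signed overflow.  */
#if 0
#include <stdio.h>
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
int
main (void)
{
  int a = 0x7fffffff, b = 0x7fffffff;
  int sum = (int) ((unsigned int) a + (unsigned int) b);
  printf ("%d\n", OVERFLOW_SUM_SIGN (a, b, sum));   /* prints 1 */
  printf ("%d\n", OVERFLOW_SUM_SIGN (1, 2, 3));     /* prints 0 */
  return 0;
}
#endif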
132 \f
133 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
134 We do that by representing the two-word integer in 4 words, with only
135 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
136 number. The value of the word is LOWPART + HIGHPART * BASE. */
137
138 #define LOWPART(x) \
139 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
140 #define HIGHPART(x) \
141 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
142 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
143
144 /* Unpack a two-word integer into 4 words.
145 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
146 WORDS points to the array of HOST_WIDE_INTs. */
147
148 static void
149 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
150 {
151 words[0] = LOWPART (low);
152 words[1] = HIGHPART (low);
153 words[2] = LOWPART (hi);
154 words[3] = HIGHPART (hi);
155 }
156
157 /* Pack an array of 4 words into a two-word integer.
158 WORDS points to the array of words.
159 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
160
161 static void
162 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low, HOST_WIDE_INT *hi)
163 {
164 *low = words[0] + words[1] * BASE;
165 *hi = words[2] + words[3] * BASE;
166 }
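/* Editor's illustrative sketch, not part of the original source: the same
   half-word representation, shown standalone with 32-bit words (16-bit
   half-words) instead of HOST_WIDE_INT.  encode splits each word into two
   non-negative digits; decode reassembles each word as
   LOWPART + HIGHPART * BASE, and the round trip is exact.  */
#if 0
#include <stdio.h>
#include <stdint.h>
int
main (void)
{
  const uint32_t base = 1u << 16;
  uint32_t low = 0xdeadbeef, hi = 0x01234567;
  uint32_t words[4];
  words[0] = low & (base - 1);  words[1] = low >> 16;   /* encode */
  words[2] = hi & (base - 1);   words[3] = hi >> 16;
  printf ("%08x %08x\n",                                /* decode */
          words[0] + words[1] * base, words[2] + words[3] * base);
  /* prints "deadbeef 01234567" */
  return 0;
}
#endif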
167 \f
168 /* Make the integer constant T valid for its type by setting to 0 or 1 all
169 the bits in the constant that don't belong in the type.
170
171 Return 1 if a signed overflow occurs, 0 otherwise. If OVERFLOW is
172 nonzero, a signed overflow has already occurred in calculating T, so
173 propagate it. */
174
175 int
176 force_fit_type (tree t, int overflow)
177 {
178 unsigned HOST_WIDE_INT low;
179 HOST_WIDE_INT high;
180 unsigned int prec;
181
182 if (TREE_CODE (t) == REAL_CST)
183 {
184 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
185 Consider doing it via real_convert now. */
186 return overflow;
187 }
188
189 else if (TREE_CODE (t) != INTEGER_CST)
190 return overflow;
191
192 low = TREE_INT_CST_LOW (t);
193 high = TREE_INT_CST_HIGH (t);
194
195 if (POINTER_TYPE_P (TREE_TYPE (t)))
196 prec = POINTER_SIZE;
197 else
198 prec = TYPE_PRECISION (TREE_TYPE (t));
199
200 /* First clear all bits that are beyond the type's precision. */
201
202 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
203 ;
204 else if (prec > HOST_BITS_PER_WIDE_INT)
205 TREE_INT_CST_HIGH (t)
206 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
207 else
208 {
209 TREE_INT_CST_HIGH (t) = 0;
210 if (prec < HOST_BITS_PER_WIDE_INT)
211 TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
212 }
213
214 /* Unsigned types do not suffer sign extension or overflow unless they
215 are a sizetype. */
216 if (TREE_UNSIGNED (TREE_TYPE (t))
217 && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
218 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
219 return overflow;
220
221 /* If the value's sign bit is set, extend the sign. */
222 if (prec != 2 * HOST_BITS_PER_WIDE_INT
223 && (prec > HOST_BITS_PER_WIDE_INT
224 ? 0 != (TREE_INT_CST_HIGH (t)
225 & ((HOST_WIDE_INT) 1
226 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
227 : 0 != (TREE_INT_CST_LOW (t)
228 & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
229 {
230 /* Value is negative:
231 set to 1 all the bits that are outside this type's precision. */
232 if (prec > HOST_BITS_PER_WIDE_INT)
233 TREE_INT_CST_HIGH (t)
234 |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
235 else
236 {
237 TREE_INT_CST_HIGH (t) = -1;
238 if (prec < HOST_BITS_PER_WIDE_INT)
239 TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
240 }
241 }
242
243 /* Return nonzero if signed overflow occurred. */
244 return
245 ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
246 != 0);
247 }
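/* Editor's illustrative sketch, not part of the original source: the
   truncate-then-sign-extend step above, reduced to one 32-bit word with
   an assumed precision of 8 bits.  0x1ff masked to 8 bits is 0xff; bit 7
   (the sign bit) is set, so every higher bit becomes 1 and the value
   reads back as -1, while 0x7f keeps a clear sign bit and is returned
   unchanged.  */
#if 0
#include <stdio.h>
#include <stdint.h>
static int32_t
fit_prec (int32_t val, unsigned int prec)   /* assumes 0 < prec < 32 */
{
  uint32_t u = (uint32_t) val & ((1u << prec) - 1);   /* clear high bits */
  if (u & (1u << (prec - 1)))                         /* sign bit set? */
    u |= ~0u << prec;                                 /* extend the sign */
  return (int32_t) u;
}
int
main (void)
{
  printf ("%d %d\n", fit_prec (0x1ff, 8), fit_prec (0x7f, 8));
  /* prints "-1 127" */
  return 0;
}
#endif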
248 \f
249 /* Add two doubleword integers with doubleword result.
250 Each argument is given as two `HOST_WIDE_INT' pieces.
251 One argument is L1 and H1; the other, L2 and H2.
252 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
253
254 int
255 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1, unsigned HOST_WIDE_INT l2,
256 HOST_WIDE_INT h2, unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
257 {
258 unsigned HOST_WIDE_INT l;
259 HOST_WIDE_INT h;
260
261 l = l1 + l2;
262 h = h1 + h2 + (l < l1);
263
264 *lv = l;
265 *hv = h;
266 return OVERFLOW_SUM_SIGN (h1, h2, h);
267 }
268
269 /* Negate a doubleword integer with doubleword result.
270 Return nonzero if the operation overflows, assuming it's signed.
271 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
272 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
273
274 int
275 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1, unsigned HOST_WIDE_INT *lv,
276 HOST_WIDE_INT *hv)
277 {
278 if (l1 == 0)
279 {
280 *lv = 0;
281 *hv = - h1;
282 return (*hv & h1) < 0;
283 }
284 else
285 {
286 *lv = -l1;
287 *hv = ~h1;
288 return 0;
289 }
290 }
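/* Editor's illustrative sketch, not part of the original source: why the
   two branches above implement two's-complement negation across two
   words.  -X is ~X + 1; the +1 carries out of the low word only when L1
   is 0 (since ~0 + 1 wraps to 0), which is the first branch.  Otherwise
   the low word absorbs the +1 and the high word is just ~H1.  Checked
   here against native 64-bit negation.  */
#if 0
#include <stdio.h>
#include <stdint.h>
static uint64_t
neg2 (uint32_t l1, uint32_t h1)
{
  uint32_t lv, hv;
  if (l1 == 0)
    lv = 0, hv = -h1;        /* carry propagates into the high word */
  else
    lv = -l1, hv = ~h1;      /* no carry out of the low word */
  return ((uint64_t) hv << 32) | lv;
}
int
main (void)
{
  uint64_t x = (uint64_t) 5 << 32;              /* low word zero */
  uint64_t y = ((uint64_t) 5 << 32) | 7;        /* low word nonzero */
  printf ("%d %d\n", neg2 (0, 5) == -x, neg2 (7, 5) == -y);
  /* prints "1 1" */
  return 0;
}
#endif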
291 \f
292 /* Multiply two doubleword integers with doubleword result.
293 Return nonzero if the operation overflows, assuming it's signed.
294 Each argument is given as two `HOST_WIDE_INT' pieces.
295 One argument is L1 and H1; the other, L2 and H2.
296 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
297
298 int
299 mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1, unsigned HOST_WIDE_INT l2,
300 HOST_WIDE_INT h2, unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
301 {
302 HOST_WIDE_INT arg1[4];
303 HOST_WIDE_INT arg2[4];
304 HOST_WIDE_INT prod[4 * 2];
305 unsigned HOST_WIDE_INT carry;
306 int i, j, k;
307 unsigned HOST_WIDE_INT toplow, neglow;
308 HOST_WIDE_INT tophigh, neghigh;
309
310 encode (arg1, l1, h1);
311 encode (arg2, l2, h2);
312
313 memset ((char *) prod, 0, sizeof prod);
314
315 for (i = 0; i < 4; i++)
316 {
317 carry = 0;
318 for (j = 0; j < 4; j++)
319 {
320 k = i + j;
321 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
322 carry += arg1[i] * arg2[j];
323 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
324 carry += prod[k];
325 prod[k] = LOWPART (carry);
326 carry = HIGHPART (carry);
327 }
328 prod[i + 4] = carry;
329 }
330
331 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
332
333 /* Check for overflow by calculating the top half of the answer in full;
334 it should agree with the low half's sign bit. */
335 decode (prod + 4, &toplow, &tophigh);
336 if (h1 < 0)
337 {
338 neg_double (l2, h2, &neglow, &neghigh);
339 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
340 }
341 if (h2 < 0)
342 {
343 neg_double (l1, h1, &neglow, &neghigh);
344 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
345 }
346 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
347 }
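/* Editor's illustrative sketch, not part of the original source: the
   digit loop above is ordinary long multiplication in base 2^(bits/2).
   Standalone version with 16-bit digits of 32-bit factors; the bound
   noted in the loop holds because digit*digit <= 0xFFFE0001 and the two
   additions keep the running carry within 32 bits.  */
#if 0
#include <stdio.h>
#include <stdint.h>
int
main (void)
{
  uint32_t a = 0xdeadbeef, b = 0xfeedface;
  uint32_t ad[2] = { a & 0xffff, a >> 16 };
  uint32_t bd[2] = { b & 0xffff, b >> 16 };
  uint32_t prod[4] = { 0, 0, 0, 0 };
  uint64_t exact, rebuilt;
  int i, j;

  for (i = 0; i < 2; i++)
    {
      uint32_t carry = 0;
      for (j = 0; j < 2; j++)
        {
          carry += ad[i] * bd[j] + prod[i + j];   /* fits in 32 bits */
          prod[i + j] = carry & 0xffff;
          carry >>= 16;
        }
      prod[i + 2] = carry;
    }

  exact = (uint64_t) a * b;
  rebuilt = ((uint64_t) prod[3] << 48) | ((uint64_t) prod[2] << 32)
            | ((uint64_t) prod[1] << 16) | prod[0];
  printf ("%d\n", rebuilt == exact);   /* prints 1 */
  return 0;
}
#endif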
348 \f
349 /* Shift the doubleword integer in L1, H1 left by COUNT places
350 keeping only PREC bits of result.
351 Shift right if COUNT is negative.
352 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
353 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
354
355 void
356 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1, HOST_WIDE_INT count,
357 unsigned int prec, unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
358 int arith)
359 {
360 unsigned HOST_WIDE_INT signmask;
361
362 if (count < 0)
363 {
364 rshift_double (l1, h1, -count, prec, lv, hv, arith);
365 return;
366 }
367
368 #ifdef SHIFT_COUNT_TRUNCATED
369 if (SHIFT_COUNT_TRUNCATED)
370 count %= prec;
371 #endif
372
373 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
374 {
375 /* Shifting by the host word size is undefined according to the
376 ANSI standard, so we must handle this as a special case. */
377 *hv = 0;
378 *lv = 0;
379 }
380 else if (count >= HOST_BITS_PER_WIDE_INT)
381 {
382 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
383 *lv = 0;
384 }
385 else
386 {
387 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
388 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
389 *lv = l1 << count;
390 }
391
392 /* Sign extend all bits that are beyond the precision. */
393
394 signmask = -((prec > HOST_BITS_PER_WIDE_INT
395 ? ((unsigned HOST_WIDE_INT) *hv
396 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
397 : (*lv >> (prec - 1))) & 1);
398
399 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
400 ;
401 else if (prec >= HOST_BITS_PER_WIDE_INT)
402 {
403 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
404 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
405 }
406 else
407 {
408 *hv = signmask;
409 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
410 *lv |= signmask << prec;
411 }
412 }
413
414 /* Shift the doubleword integer in L1, H1 right by COUNT places
415 keeping only PREC bits of result. COUNT must be positive.
416 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
417 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
418
419 void
420 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1, HOST_WIDE_INT count,
421 unsigned int prec, unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
422 int arith)
423 {
424 unsigned HOST_WIDE_INT signmask;
425
426 signmask = (arith
427 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
428 : 0);
429
430 #ifdef SHIFT_COUNT_TRUNCATED
431 if (SHIFT_COUNT_TRUNCATED)
432 count %= prec;
433 #endif
434
435 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
436 {
437 /* Shifting by the host word size is undefined according to the
438 ANSI standard, so we must handle this as a special case. */
439 *hv = 0;
440 *lv = 0;
441 }
442 else if (count >= HOST_BITS_PER_WIDE_INT)
443 {
444 *hv = 0;
445 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
446 }
447 else
448 {
449 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
450 *lv = ((l1 >> count)
451 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
452 }
453
454 /* Zero / sign extend all bits that are beyond the precision. */
455
456 if (count >= (HOST_WIDE_INT)prec)
457 {
458 *hv = signmask;
459 *lv = signmask;
460 }
461 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
462 ;
463 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
464 {
465 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
466 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
467 }
468 else
469 {
470 *hv = signmask;
471 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
472 *lv |= signmask << (prec - count);
473 }
474 }
475 \f
476 /* Rotate the doubleword integer in L1, H1 left by COUNT places
477 keeping only PREC bits of result.
478 Rotate right if COUNT is negative.
479 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
480
481 void
482 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1, HOST_WIDE_INT count,
483 unsigned int prec, unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
484 {
485 unsigned HOST_WIDE_INT s1l, s2l;
486 HOST_WIDE_INT s1h, s2h;
487
488 count %= prec;
489 if (count < 0)
490 count += prec;
491
492 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
493 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
494 *lv = s1l | s2l;
495 *hv = s1h | s2h;
496 }
497
498 /* Rotate the doubleword integer in L1, H1 right by COUNT places
499 keeping only PREC bits of result. COUNT must be positive.
500 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
501
502 void
503 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1, HOST_WIDE_INT count,
504 unsigned int prec, unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
505 {
506 unsigned HOST_WIDE_INT s1l, s2l;
507 HOST_WIDE_INT s1h, s2h;
508
509 count %= prec;
510 if (count < 0)
511 count += prec;
512
513 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
514 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
515 *lv = s1l | s2l;
516 *hv = s1h | s2h;
517 }
518 \f
519 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
520 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
521 CODE is a tree code for a kind of division, one of
522 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
523 or EXACT_DIV_EXPR
524 It controls how the quotient is rounded to an integer.
525 Return nonzero if the operation overflows.
526 UNS nonzero says do unsigned division. */
527
528 int
529 div_and_round_double (enum tree_code code, int uns,
530 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
531 HOST_WIDE_INT hnum_orig,
532 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
533 HOST_WIDE_INT hden_orig, unsigned HOST_WIDE_INT *lquo,
534 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
535 HOST_WIDE_INT *hrem)
536 {
537 int quo_neg = 0;
538 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
539 HOST_WIDE_INT den[4], quo[4];
540 int i, j;
541 unsigned HOST_WIDE_INT work;
542 unsigned HOST_WIDE_INT carry = 0;
543 unsigned HOST_WIDE_INT lnum = lnum_orig;
544 HOST_WIDE_INT hnum = hnum_orig;
545 unsigned HOST_WIDE_INT lden = lden_orig;
546 HOST_WIDE_INT hden = hden_orig;
547 int overflow = 0;
548
549 if (hden == 0 && lden == 0)
550 overflow = 1, lden = 1;
551
552 /* calculate quotient sign and convert operands to unsigned. */
553 if (!uns)
554 {
555 if (hnum < 0)
556 {
557 quo_neg = ~ quo_neg;
558 /* (minimum integer) / (-1) is the only overflow case. */
559 if (neg_double (lnum, hnum, &lnum, &hnum)
560 && ((HOST_WIDE_INT) lden & hden) == -1)
561 overflow = 1;
562 }
563 if (hden < 0)
564 {
565 quo_neg = ~ quo_neg;
566 neg_double (lden, hden, &lden, &hden);
567 }
568 }
569
570 if (hnum == 0 && hden == 0)
571 { /* single precision */
572 *hquo = *hrem = 0;
573 /* This unsigned division rounds toward zero. */
574 *lquo = lnum / lden;
575 goto finish_up;
576 }
577
578 if (hnum == 0)
579 { /* trivial case: dividend < divisor */
580 /* hden != 0 already checked. */
581 *hquo = *lquo = 0;
582 *hrem = hnum;
583 *lrem = lnum;
584 goto finish_up;
585 }
586
587 memset ((char *) quo, 0, sizeof quo);
588
589   memset ((char *) num, 0, sizeof num);	/* to zero the extra (5th) element */
590 memset ((char *) den, 0, sizeof den);
591
592 encode (num, lnum, hnum);
593 encode (den, lden, hden);
594
595 /* Special code for when the divisor < BASE. */
596 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
597 {
598 /* hnum != 0 already checked. */
599 for (i = 4 - 1; i >= 0; i--)
600 {
601 work = num[i] + carry * BASE;
602 quo[i] = work / lden;
603 carry = work % lden;
604 }
605 }
606 else
607 {
608 /* Full double precision division,
609 with thanks to Don Knuth's "Seminumerical Algorithms". */
610 int num_hi_sig, den_hi_sig;
611 unsigned HOST_WIDE_INT quo_est, scale;
612
613 /* Find the highest nonzero divisor digit. */
614 for (i = 4 - 1;; i--)
615 if (den[i] != 0)
616 {
617 den_hi_sig = i;
618 break;
619 }
620
621 /* Ensure that the first digit of the divisor is at least BASE/2.
622 This is required by the quotient digit estimation algorithm. */
623
624 scale = BASE / (den[den_hi_sig] + 1);
625 if (scale > 1)
626 { /* scale divisor and dividend */
627 carry = 0;
628 for (i = 0; i <= 4 - 1; i++)
629 {
630 work = (num[i] * scale) + carry;
631 num[i] = LOWPART (work);
632 carry = HIGHPART (work);
633 }
634
635 num[4] = carry;
636 carry = 0;
637 for (i = 0; i <= 4 - 1; i++)
638 {
639 work = (den[i] * scale) + carry;
640 den[i] = LOWPART (work);
641 carry = HIGHPART (work);
642 if (den[i] != 0) den_hi_sig = i;
643 }
644 }
645
646 num_hi_sig = 4;
647
648 /* Main loop */
649 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
650 {
651 /* Guess the next quotient digit, quo_est, by dividing the first
652 two remaining dividend digits by the high order quotient digit.
653 quo_est is never low and is at most 2 high. */
654 unsigned HOST_WIDE_INT tmp;
655
656 num_hi_sig = i + den_hi_sig + 1;
657 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
658 if (num[num_hi_sig] != den[den_hi_sig])
659 quo_est = work / den[den_hi_sig];
660 else
661 quo_est = BASE - 1;
662
663 /* Refine quo_est so it's usually correct, and at most one high. */
664 tmp = work - quo_est * den[den_hi_sig];
665 if (tmp < BASE
666 && (den[den_hi_sig - 1] * quo_est
667 > (tmp * BASE + num[num_hi_sig - 2])))
668 quo_est--;
669
670 /* Try QUO_EST as the quotient digit, by multiplying the
671 divisor by QUO_EST and subtracting from the remaining dividend.
672 Keep in mind that QUO_EST is the (I - 1)st digit.
673
674 carry = 0;
675 for (j = 0; j <= den_hi_sig; j++)
676 {
677 work = quo_est * den[j] + carry;
678 carry = HIGHPART (work);
679 work = num[i + j] - LOWPART (work);
680 num[i + j] = LOWPART (work);
681 carry += HIGHPART (work) != 0;
682 }
683
684 /* If quo_est was high by one, then num[i] went negative and
685 we need to correct things. */
686 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
687 {
688 quo_est--;
689 carry = 0; /* add divisor back in */
690 for (j = 0; j <= den_hi_sig; j++)
691 {
692 work = num[i + j] + den[j] + carry;
693 carry = HIGHPART (work);
694 num[i + j] = LOWPART (work);
695 }
696
697 num [num_hi_sig] += carry;
698 }
699
700 /* Store the quotient digit. */
701 quo[i] = quo_est;
702 }
703 }
704
705 decode (quo, lquo, hquo);
706
707 finish_up:
708 /* if the result should be negative, negate it. */
709 if (quo_neg)
710 neg_double (*lquo, *hquo, lquo, hquo);
711
712 /* compute trial remainder: rem = num - (quo * den) */
713 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
714 neg_double (*lrem, *hrem, lrem, hrem);
715 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
716
717 switch (code)
718 {
719 case TRUNC_DIV_EXPR:
720 case TRUNC_MOD_EXPR: /* round toward zero */
721 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
722 return overflow;
723
724 case FLOOR_DIV_EXPR:
725 case FLOOR_MOD_EXPR: /* round toward negative infinity */
726 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
727 {
728 /* quo = quo - 1; */
729 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
730 lquo, hquo);
731 }
732 else
733 return overflow;
734 break;
735
736 case CEIL_DIV_EXPR:
737 case CEIL_MOD_EXPR: /* round toward positive infinity */
738 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
739 {
740 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
741 lquo, hquo);
742 }
743 else
744 return overflow;
745 break;
746
747 case ROUND_DIV_EXPR:
748 case ROUND_MOD_EXPR: /* round to closest integer */
749 {
750 unsigned HOST_WIDE_INT labs_rem = *lrem;
751 HOST_WIDE_INT habs_rem = *hrem;
752 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
753 HOST_WIDE_INT habs_den = hden, htwice;
754
755 /* Get absolute values. */
756 if (*hrem < 0)
757 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
758 if (hden < 0)
759 neg_double (lden, hden, &labs_den, &habs_den);
760
761 /* If (2 * abs (lrem) >= abs (lden)) */
762 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
763 labs_rem, habs_rem, &ltwice, &htwice);
764
765 if (((unsigned HOST_WIDE_INT) habs_den
766 < (unsigned HOST_WIDE_INT) htwice)
767 || (((unsigned HOST_WIDE_INT) habs_den
768 == (unsigned HOST_WIDE_INT) htwice)
769 && (labs_den < ltwice)))
770 {
771 if (*hquo < 0)
772 /* quo = quo - 1; */
773 add_double (*lquo, *hquo,
774 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
775 else
776 /* quo = quo + 1; */
777 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
778 lquo, hquo);
779 }
780 else
781 return overflow;
782 }
783 break;
784
785 default:
786 abort ();
787 }
788
789 /* compute true remainder: rem = num - (quo * den) */
790 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
791 neg_double (*lrem, *hrem, lrem, hrem);
792 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
793 return overflow;
794 }
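/* Editor's illustrative sketch, not part of the original source: the
   post-division adjustments above, shown on single ints.  C's `/'
   truncates toward zero; FLOOR_DIV subtracts one from a negative
   quotient with a nonzero remainder, CEIL_DIV adds one to a positive
   one, and ROUND_DIV moves away from zero when 2*|rem| >= |den| (ties
   round away from zero, as in the code above).  */
#if 0
#include <stdio.h>
#include <stdlib.h>
int
main (void)
{
  int num = -7, den = 2;
  int quo = num / den, rem = num % den;   /* -3, remainder -1 */
  int neg = (num < 0) != (den < 0);       /* quotient is negative */
  int floor_q = quo - (neg && rem != 0);
  int ceil_q = quo + (!neg && rem != 0);
  int round_q = quo;
  if (2 * abs (rem) >= abs (den))
    round_q += neg ? -1 : 1;
  printf ("trunc=%d floor=%d ceil=%d round=%d\n",
          quo, floor_q, ceil_q, round_q);   /* -3 -4 -3 -4 */
  return 0;
}
#endif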
795 \f
796 /* Determine whether an expression T can be cheaply negated using
797 the function negate_expr. */
798
799 static bool
800 negate_expr_p (tree t)
801 {
802 unsigned HOST_WIDE_INT val;
803 unsigned int prec;
804 tree type;
805
806 if (t == 0)
807 return false;
808
809 type = TREE_TYPE (t);
810
811 STRIP_SIGN_NOPS (t);
812 switch (TREE_CODE (t))
813 {
814 case INTEGER_CST:
815 if (TREE_UNSIGNED (type))
816 return false;
817
818 /* Check that -CST will not overflow type. */
819 prec = TYPE_PRECISION (type);
820 if (prec > HOST_BITS_PER_WIDE_INT)
821 {
822 if (TREE_INT_CST_LOW (t) != 0)
823 return true;
824 prec -= HOST_BITS_PER_WIDE_INT;
825 val = TREE_INT_CST_HIGH (t);
826 }
827 else
828 val = TREE_INT_CST_LOW (t);
829 if (prec < HOST_BITS_PER_WIDE_INT)
830 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
831 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
832
833 case REAL_CST:
834 case NEGATE_EXPR:
835 case MINUS_EXPR:
836 return true;
837
838 default:
839 break;
840 }
841 return false;
842 }
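/* Editor's illustrative note, not part of the original source: the
   INTEGER_CST case above rejects exactly one value, the most negative
   value of the type, whose negation is unrepresentable.  With an assumed
   8-bit signed type on a two's-complement host, -(-128) wraps back to
   -128.  */
#if 0
#include <stdio.h>
#include <stdint.h>
int
main (void)
{
  int8_t min8 = -128;
  printf ("%d\n", (int8_t) -min8);   /* prints -128: negation wrapped */
  return 0;
}
#endif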
843
844 /* Given T, an expression, return the negation of T. Allow for T to be
845 null, in which case return null. */
846
847 static tree
848 negate_expr (tree t)
849 {
850 tree type;
851 tree tem;
852
853 if (t == 0)
854 return 0;
855
856 type = TREE_TYPE (t);
857 STRIP_SIGN_NOPS (t);
858
859 switch (TREE_CODE (t))
860 {
861 case INTEGER_CST:
862 case REAL_CST:
863 if (! TREE_UNSIGNED (type)
864 && 0 != (tem = fold (build1 (NEGATE_EXPR, type, t)))
865 && ! TREE_OVERFLOW (tem))
866 return tem;
867 break;
868
869 case NEGATE_EXPR:
870 return convert (type, TREE_OPERAND (t, 0));
871
872 case MINUS_EXPR:
873 /* - (A - B) -> B - A */
874 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
875 return convert (type,
876 fold (build (MINUS_EXPR, TREE_TYPE (t),
877 TREE_OPERAND (t, 1),
878 TREE_OPERAND (t, 0))));
879 break;
880
881 default:
882 break;
883 }
884
885 return convert (type, fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t)));
886 }
887 \f
888 /* Split a tree IN into constant, literal and variable parts that could be
889 combined with CODE to make IN. "constant" means an expression with
890 TREE_CONSTANT but that isn't an actual constant. CODE must be a
891 commutative arithmetic operation. Store the constant part into *CONP,
892 the literal in *LITP and return the variable part. If a part isn't
893 present, set it to null. If the tree does not decompose in this way,
894 return the entire tree as the variable part and the other parts as null.
895
896 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
897 case, we negate an operand that was subtracted, except if it is a
898 literal, in which case we use *MINUS_LITP instead.
899
900 If NEGATE_P is true, we are negating all of IN, again except a literal
901 for which we use *MINUS_LITP instead.
902
903 If IN is itself a literal or constant, return it as appropriate.
904
905 Note that we do not guarantee that any of the three values will be the
906 same type as IN, but they will have the same signedness and mode. */
907
908 static tree
909 split_tree (tree in, enum tree_code code, tree *conp, tree *litp, tree *minus_litp, int negate_p)
910 {
911 tree var = 0;
912
913 *conp = 0;
914 *litp = 0;
915 *minus_litp = 0;
916
917 /* Strip any conversions that don't change the machine mode or signedness. */
918 STRIP_SIGN_NOPS (in);
919
920 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
921 *litp = in;
922 else if (TREE_CODE (in) == code
923 || (! FLOAT_TYPE_P (TREE_TYPE (in))
924 /* We can associate addition and subtraction together (even
925 though the C standard doesn't say so) for integers because
926 the value is not affected. For reals, the value might be
927 affected, so we can't. */
928 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
929 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
930 {
931 tree op0 = TREE_OPERAND (in, 0);
932 tree op1 = TREE_OPERAND (in, 1);
933 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
934 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
935
936 /* First see if either of the operands is a literal, then a constant. */
937 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
938 *litp = op0, op0 = 0;
939 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
940 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
941
942 if (op0 != 0 && TREE_CONSTANT (op0))
943 *conp = op0, op0 = 0;
944 else if (op1 != 0 && TREE_CONSTANT (op1))
945 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
946
947 /* If we haven't dealt with either operand, this is not a case we can
948 decompose. Otherwise, VAR is either of the ones remaining, if any. */
949 if (op0 != 0 && op1 != 0)
950 var = in;
951 else if (op0 != 0)
952 var = op0;
953 else
954 var = op1, neg_var_p = neg1_p;
955
956 /* Now do any needed negations. */
957 if (neg_litp_p)
958 *minus_litp = *litp, *litp = 0;
959 if (neg_conp_p)
960 *conp = negate_expr (*conp);
961 if (neg_var_p)
962 var = negate_expr (var);
963 }
964 else if (TREE_CONSTANT (in))
965 *conp = in;
966 else
967 var = in;
968
969 if (negate_p)
970 {
971 if (*litp)
972 *minus_litp = *litp, *litp = 0;
973 else if (*minus_litp)
974 *litp = *minus_litp, *minus_litp = 0;
975 *conp = negate_expr (*conp);
976 var = negate_expr (var);
977 }
978
979 return var;
980 }
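/* Editor's illustrative example, not part of the original source: for
   IN = (x + 5) with CODE = PLUS_EXPR, split_tree returns x and sets
   *LITP to 5 with *CONP null; for IN = (x - 5) the literal is
   subtracted, so it is stored in *MINUS_LITP instead; and with NEGATE_P
   nonzero the two literal slots trade places and the variable part
   comes back as negate_expr (x).  */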
981
982 /* Re-associate trees split by the above function. T1 and T2 are either
983 expressions to associate or null. Return the new expression, if any. If
984 we build an operation, do it in TYPE and with CODE. */
985
986 static tree
987 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
988 {
989 if (t1 == 0)
990 return t2;
991 else if (t2 == 0)
992 return t1;
993
994 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
995 try to fold this since we will have infinite recursion. But do
996 deal with any NEGATE_EXPRs. */
997 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
998 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
999 {
1000 if (code == PLUS_EXPR)
1001 {
1002 if (TREE_CODE (t1) == NEGATE_EXPR)
1003 return build (MINUS_EXPR, type, convert (type, t2),
1004 convert (type, TREE_OPERAND (t1, 0)));
1005 else if (TREE_CODE (t2) == NEGATE_EXPR)
1006 return build (MINUS_EXPR, type, convert (type, t1),
1007 convert (type, TREE_OPERAND (t2, 0)));
1008 }
1009 return build (code, type, convert (type, t1), convert (type, t2));
1010 }
1011
1012 return fold (build (code, type, convert (type, t1), convert (type, t2)));
1013 }
1014 \f
1015 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1016 to produce a new constant.
1017
1018 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1019
1020 static tree
1021 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1022 {
1023 unsigned HOST_WIDE_INT int1l, int2l;
1024 HOST_WIDE_INT int1h, int2h;
1025 unsigned HOST_WIDE_INT low;
1026 HOST_WIDE_INT hi;
1027 unsigned HOST_WIDE_INT garbagel;
1028 HOST_WIDE_INT garbageh;
1029 tree t;
1030 tree type = TREE_TYPE (arg1);
1031 int uns = TREE_UNSIGNED (type);
1032 int is_sizetype
1033 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1034 int overflow = 0;
1035 int no_overflow = 0;
1036
1037 int1l = TREE_INT_CST_LOW (arg1);
1038 int1h = TREE_INT_CST_HIGH (arg1);
1039 int2l = TREE_INT_CST_LOW (arg2);
1040 int2h = TREE_INT_CST_HIGH (arg2);
1041
1042 switch (code)
1043 {
1044 case BIT_IOR_EXPR:
1045 low = int1l | int2l, hi = int1h | int2h;
1046 break;
1047
1048 case BIT_XOR_EXPR:
1049 low = int1l ^ int2l, hi = int1h ^ int2h;
1050 break;
1051
1052 case BIT_AND_EXPR:
1053 low = int1l & int2l, hi = int1h & int2h;
1054 break;
1055
1056 case BIT_ANDTC_EXPR:
1057 low = int1l & ~int2l, hi = int1h & ~int2h;
1058 break;
1059
1060 case RSHIFT_EXPR:
1061 int2l = -int2l;
1062 case LSHIFT_EXPR:
1063 /* It's unclear from the C standard whether shifts can overflow.
1064 The following code ignores overflow; perhaps a C standard
1065 interpretation ruling is needed. */
1066 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1067 &low, &hi, !uns);
1068 no_overflow = 1;
1069 break;
1070
1071 case RROTATE_EXPR:
1072 int2l = - int2l;
1073 case LROTATE_EXPR:
1074 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1075 &low, &hi);
1076 break;
1077
1078 case PLUS_EXPR:
1079 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1080 break;
1081
1082 case MINUS_EXPR:
1083 neg_double (int2l, int2h, &low, &hi);
1084 add_double (int1l, int1h, low, hi, &low, &hi);
1085 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1086 break;
1087
1088 case MULT_EXPR:
1089 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1090 break;
1091
1092 case TRUNC_DIV_EXPR:
1093 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1094 case EXACT_DIV_EXPR:
1095 /* This is a shortcut for a common special case. */
1096 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1097 && ! TREE_CONSTANT_OVERFLOW (arg1)
1098 && ! TREE_CONSTANT_OVERFLOW (arg2)
1099 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1100 {
1101 if (code == CEIL_DIV_EXPR)
1102 int1l += int2l - 1;
1103
1104 low = int1l / int2l, hi = 0;
1105 break;
1106 }
1107
1108 /* ... fall through ... */
1109
1110 case ROUND_DIV_EXPR:
1111 if (int2h == 0 && int2l == 1)
1112 {
1113 low = int1l, hi = int1h;
1114 break;
1115 }
1116 if (int1l == int2l && int1h == int2h
1117 && ! (int1l == 0 && int1h == 0))
1118 {
1119 low = 1, hi = 0;
1120 break;
1121 }
1122 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1123 &low, &hi, &garbagel, &garbageh);
1124 break;
1125
1126 case TRUNC_MOD_EXPR:
1127 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1128 /* This is a shortcut for a common special case. */
1129 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1130 && ! TREE_CONSTANT_OVERFLOW (arg1)
1131 && ! TREE_CONSTANT_OVERFLOW (arg2)
1132 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1133 {
1134 if (code == CEIL_MOD_EXPR)
1135 int1l += int2l - 1;
1136 low = int1l % int2l, hi = 0;
1137 break;
1138 }
1139
1140 /* ... fall through ... */
1141
1142 case ROUND_MOD_EXPR:
1143 overflow = div_and_round_double (code, uns,
1144 int1l, int1h, int2l, int2h,
1145 &garbagel, &garbageh, &low, &hi);
1146 break;
1147
1148 case MIN_EXPR:
1149 case MAX_EXPR:
1150 if (uns)
1151 low = (((unsigned HOST_WIDE_INT) int1h
1152 < (unsigned HOST_WIDE_INT) int2h)
1153 || (((unsigned HOST_WIDE_INT) int1h
1154 == (unsigned HOST_WIDE_INT) int2h)
1155 && int1l < int2l));
1156 else
1157 low = (int1h < int2h
1158 || (int1h == int2h && int1l < int2l));
1159
1160 if (low == (code == MIN_EXPR))
1161 low = int1l, hi = int1h;
1162 else
1163 low = int2l, hi = int2h;
1164 break;
1165
1166 default:
1167 abort ();
1168 }
1169
1170   /* If this is for a sizetype, the result can be represented as one (signed)
1171 HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
1172 constants. */
1173 if (is_sizetype
1174 && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
1175 || (hi == -1 && (HOST_WIDE_INT) low < 0))
1176 && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
1177 return size_int_type_wide (low, type);
1178 else
1179 {
1180 t = build_int_2 (low, hi);
1181 TREE_TYPE (t) = TREE_TYPE (arg1);
1182 }
1183
1184 TREE_OVERFLOW (t)
1185 = ((notrunc
1186 ? (!uns || is_sizetype) && overflow
1187 : (force_fit_type (t, (!uns || is_sizetype) && overflow)
1188 && ! no_overflow))
1189 | TREE_OVERFLOW (arg1)
1190 | TREE_OVERFLOW (arg2));
1191
1192 /* If we're doing a size calculation, unsigned arithmetic does overflow.
1193 So check if force_fit_type truncated the value. */
1194 if (is_sizetype
1195 && ! TREE_OVERFLOW (t)
1196 && (TREE_INT_CST_HIGH (t) != hi
1197 || TREE_INT_CST_LOW (t) != low))
1198 TREE_OVERFLOW (t) = 1;
1199
1200 TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
1201 | TREE_CONSTANT_OVERFLOW (arg1)
1202 | TREE_CONSTANT_OVERFLOW (arg2));
1203 return t;
1204 }
1205
1206 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1207 constant. We assume ARG1 and ARG2 have the same data type, or at least
1208 are the same kind of constant and the same machine mode.
1209
1210 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1211
1212 static tree
1213 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1214 {
1215 STRIP_NOPS (arg1);
1216 STRIP_NOPS (arg2);
1217
1218 if (TREE_CODE (arg1) == INTEGER_CST)
1219 return int_const_binop (code, arg1, arg2, notrunc);
1220
1221 if (TREE_CODE (arg1) == REAL_CST)
1222 {
1223 REAL_VALUE_TYPE d1;
1224 REAL_VALUE_TYPE d2;
1225 REAL_VALUE_TYPE value;
1226 tree t;
1227
1228 d1 = TREE_REAL_CST (arg1);
1229 d2 = TREE_REAL_CST (arg2);
1230
1231 /* If either operand is a NaN, just return it. Otherwise, set up
1232 for floating-point trap; we return an overflow. */
1233 if (REAL_VALUE_ISNAN (d1))
1234 return arg1;
1235 else if (REAL_VALUE_ISNAN (d2))
1236 return arg2;
1237
1238 REAL_ARITHMETIC (value, code, d1, d2);
1239
1240 t = build_real (TREE_TYPE (arg1),
1241 real_value_truncate (TYPE_MODE (TREE_TYPE (arg1)),
1242 value));
1243
1244 TREE_OVERFLOW (t)
1245 = (force_fit_type (t, 0)
1246 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1247 TREE_CONSTANT_OVERFLOW (t)
1248 = TREE_OVERFLOW (t)
1249 | TREE_CONSTANT_OVERFLOW (arg1)
1250 | TREE_CONSTANT_OVERFLOW (arg2);
1251 return t;
1252 }
1253 if (TREE_CODE (arg1) == COMPLEX_CST)
1254 {
1255 tree type = TREE_TYPE (arg1);
1256 tree r1 = TREE_REALPART (arg1);
1257 tree i1 = TREE_IMAGPART (arg1);
1258 tree r2 = TREE_REALPART (arg2);
1259 tree i2 = TREE_IMAGPART (arg2);
1260 tree t;
1261
1262 switch (code)
1263 {
1264 case PLUS_EXPR:
1265 t = build_complex (type,
1266 const_binop (PLUS_EXPR, r1, r2, notrunc),
1267 const_binop (PLUS_EXPR, i1, i2, notrunc));
1268 break;
1269
1270 case MINUS_EXPR:
1271 t = build_complex (type,
1272 const_binop (MINUS_EXPR, r1, r2, notrunc),
1273 const_binop (MINUS_EXPR, i1, i2, notrunc));
1274 break;
1275
1276 case MULT_EXPR:
1277 t = build_complex (type,
1278 const_binop (MINUS_EXPR,
1279 const_binop (MULT_EXPR,
1280 r1, r2, notrunc),
1281 const_binop (MULT_EXPR,
1282 i1, i2, notrunc),
1283 notrunc),
1284 const_binop (PLUS_EXPR,
1285 const_binop (MULT_EXPR,
1286 r1, i2, notrunc),
1287 const_binop (MULT_EXPR,
1288 i1, r2, notrunc),
1289 notrunc));
1290 break;
1291
1292 case RDIV_EXPR:
1293 {
1294 tree magsquared
1295 = const_binop (PLUS_EXPR,
1296 const_binop (MULT_EXPR, r2, r2, notrunc),
1297 const_binop (MULT_EXPR, i2, i2, notrunc),
1298 notrunc);
1299
1300 t = build_complex (type,
1301 const_binop
1302 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1303 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1304 const_binop (PLUS_EXPR,
1305 const_binop (MULT_EXPR, r1, r2,
1306 notrunc),
1307 const_binop (MULT_EXPR, i1, i2,
1308 notrunc),
1309 notrunc),
1310 magsquared, notrunc),
1311 const_binop
1312 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1313 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1314 const_binop (MINUS_EXPR,
1315 const_binop (MULT_EXPR, i1, r2,
1316 notrunc),
1317 const_binop (MULT_EXPR, r1, i2,
1318 notrunc),
1319 notrunc),
1320 magsquared, notrunc));
1321 }
1322 break;
1323
1324 default:
1325 abort ();
1326 }
1327 return t;
1328 }
1329 return 0;
1330 }
1331
1332 /* These are the hash table functions for the hash table of INTEGER_CST
1333 nodes of a sizetype. */
1334
1335 /* Return the hash code of X, an INTEGER_CST. */
1336
1337 static hashval_t
1338 size_htab_hash (const void *x)
1339 {
1340 tree t = (tree) x;
1341
1342 return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
1343 ^ htab_hash_pointer (TREE_TYPE (t))
1344 ^ (TREE_OVERFLOW (t) << 20));
1345 }
1346
1347 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1348 is the same as that given by *Y, also an INTEGER_CST tree node. */
1349
1350 static int
1351 size_htab_eq (const void *x, const void *y)
1352 {
1353 tree xt = (tree) x;
1354 tree yt = (tree) y;
1355
1356 return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
1357 && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
1358 && TREE_TYPE (xt) == TREE_TYPE (yt)
1359 && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
1360 }
1361 \f
1362 /* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
1363 bits are given by NUMBER and of the sizetype represented by KIND. */
1364
1365 tree
1366 size_int_wide (HOST_WIDE_INT number, enum size_type_kind kind)
1367 {
1368 return size_int_type_wide (number, sizetype_tab[(int) kind]);
1369 }
1370
1371 /* Likewise, but the desired type is specified explicitly. */
1372
1373 static GTY (()) tree new_const;
1374 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
1375 htab_t size_htab;
1376
1377 tree
1378 size_int_type_wide (HOST_WIDE_INT number, tree type)
1379 {
1380 void **slot;
1381
1382 if (size_htab == 0)
1383 {
1384 size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
1385 new_const = make_node (INTEGER_CST);
1386 }
1387
1388 /* Adjust NEW_CONST to be the constant we want. If it's already in the
1389 hash table, we return the value from the hash table. Otherwise, we
1390 place that in the hash table and make a new node for the next time. */
1391 TREE_INT_CST_LOW (new_const) = number;
1392 TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
1393 TREE_TYPE (new_const) = type;
1394 TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
1395 = force_fit_type (new_const, 0);
1396
1397 slot = htab_find_slot (size_htab, new_const, INSERT);
1398 if (*slot == 0)
1399 {
1400 tree t = new_const;
1401
1402 *slot = new_const;
1403 new_const = make_node (INTEGER_CST);
1404 return t;
1405 }
1406 else
1407 return (tree) *slot;
1408 }
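/* Editor's illustrative note, not part of the original source: the
   scheme above is hash-consing with one scratch node.  NEW_CONST is
   filled in and probed; on a hit the cached node is returned and the
   scratch node is reused for the next lookup, on a miss the scratch
   node itself is inserted and a fresh one is allocated.  Each distinct
   (value, type, overflow) triple is therefore represented by exactly
   one INTEGER_CST node.  */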
1409
1410 /* Combine operands ARG0 and ARG1 with arithmetic operation CODE. CODE
1411 is a tree code. The type of the result is taken from the operands.
1412 Both must be the same integer type and it must be a sizetype.
1413 If the operands are constant, so is the result. */
1414
1415 tree
1416 size_binop (enum tree_code code, tree arg0, tree arg1)
1417 {
1418 tree type = TREE_TYPE (arg0);
1419
1420 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1421 || type != TREE_TYPE (arg1))
1422 abort ();
1423
1424 /* Handle the special case of two integer constants faster. */
1425 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1426 {
1427 /* And some specific cases even faster than that. */
1428 if (code == PLUS_EXPR && integer_zerop (arg0))
1429 return arg1;
1430 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1431 && integer_zerop (arg1))
1432 return arg0;
1433 else if (code == MULT_EXPR && integer_onep (arg0))
1434 return arg1;
1435
1436 /* Handle general case of two integer constants. */
1437 return int_const_binop (code, arg0, arg1, 0);
1438 }
1439
1440 if (arg0 == error_mark_node || arg1 == error_mark_node)
1441 return error_mark_node;
1442
1443 return fold (build (code, type, arg0, arg1));
1444 }
1445
1446 /* Given two values, either both of sizetype or both of bitsizetype,
1447 compute the difference between the two values. Return the value
1448 in the signed type corresponding to the type of the operands. */
1449
1450 tree
1451 size_diffop (tree arg0, tree arg1)
1452 {
1453 tree type = TREE_TYPE (arg0);
1454 tree ctype;
1455
1456 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1457 || type != TREE_TYPE (arg1))
1458 abort ();
1459
1460 /* If the type is already signed, just do the simple thing. */
1461 if (! TREE_UNSIGNED (type))
1462 return size_binop (MINUS_EXPR, arg0, arg1);
1463
1464 ctype = (type == bitsizetype || type == ubitsizetype
1465 ? sbitsizetype : ssizetype);
1466
1467 /* If either operand is not a constant, do the conversions to the signed
1468 type and subtract. The hardware will do the right thing with any
1469 overflow in the subtraction. */
1470 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1471 return size_binop (MINUS_EXPR, convert (ctype, arg0),
1472 convert (ctype, arg1));
1473
1474 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1475 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1476 overflow) and negate (which can't either). Special-case a result
1477 of zero while we're here. */
1478 if (tree_int_cst_equal (arg0, arg1))
1479 return convert (ctype, integer_zero_node);
1480 else if (tree_int_cst_lt (arg1, arg0))
1481 return convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1482 else
1483 return size_binop (MINUS_EXPR, convert (ctype, integer_zero_node),
1484 convert (ctype, size_binop (MINUS_EXPR, arg1, arg0)));
1485 }
1486 \f
1487
1488 /* Given T, a tree representing type conversion of ARG1, a constant,
1489 return a constant tree representing the result of conversion. */
1490
1491 static tree
1492 fold_convert (tree t, tree arg1)
1493 {
1494 tree type = TREE_TYPE (t);
1495 int overflow = 0;
1496
1497 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1498 {
1499 if (TREE_CODE (arg1) == INTEGER_CST)
1500 {
1501 /* If we would build a constant wider than GCC supports,
1502 leave the conversion unfolded. */
1503 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
1504 return t;
1505
1506 /* If we are trying to make a sizetype for a small integer, use
1507 size_int to pick up cached types to reduce duplicate nodes. */
1508 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1509 && !TREE_CONSTANT_OVERFLOW (arg1)
1510 && compare_tree_int (arg1, 10000) < 0)
1511 return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);
1512
1513 /* Given an integer constant, make new constant with new type,
1514 appropriately sign-extended or truncated. */
1515 t = build_int_2 (TREE_INT_CST_LOW (arg1),
1516 TREE_INT_CST_HIGH (arg1));
1517 TREE_TYPE (t) = type;
1518 /* Indicate an overflow if (1) ARG1 already overflowed,
1519 or (2) force_fit_type indicates an overflow.
1520 Tell force_fit_type that an overflow has already occurred
1521 if ARG1 is a too-large unsigned value and T is signed.
1522 But don't indicate an overflow if converting a pointer. */
1523 TREE_OVERFLOW (t)
1524 = ((force_fit_type (t,
1525 (TREE_INT_CST_HIGH (arg1) < 0
1526 && (TREE_UNSIGNED (type)
1527 < TREE_UNSIGNED (TREE_TYPE (arg1)))))
1528 && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
1529 || TREE_OVERFLOW (arg1));
1530 TREE_CONSTANT_OVERFLOW (t)
1531 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1532 }
1533 else if (TREE_CODE (arg1) == REAL_CST)
1534 {
1535 /* Don't initialize these, use assignments.
1536 Initialized local aggregates don't work on old compilers. */
1537 REAL_VALUE_TYPE x;
1538 REAL_VALUE_TYPE l;
1539 REAL_VALUE_TYPE u;
1540 tree type1 = TREE_TYPE (arg1);
1541 int no_upper_bound;
1542
1543 x = TREE_REAL_CST (arg1);
1544 l = real_value_from_int_cst (type1, TYPE_MIN_VALUE (type));
1545
1546 no_upper_bound = (TYPE_MAX_VALUE (type) == NULL);
1547 if (!no_upper_bound)
1548 u = real_value_from_int_cst (type1, TYPE_MAX_VALUE (type));
1549
1550 /* See if X will be in range after truncation towards 0.
1551 To compensate for truncation, move the bounds away from 0,
1552 but reject if X exactly equals the adjusted bounds. */
1553 REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
1554 if (!no_upper_bound)
1555 REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
1556 /* If X is a NaN, use zero instead and show we have an overflow.
1557 Otherwise, range check. */
1558 if (REAL_VALUE_ISNAN (x))
1559 overflow = 1, x = dconst0;
1560 else if (! (REAL_VALUES_LESS (l, x)
1561 && !no_upper_bound
1562 && REAL_VALUES_LESS (x, u)))
1563 overflow = 1;
1564
1565 {
1566 HOST_WIDE_INT low, high;
1567 REAL_VALUE_TO_INT (&low, &high, x);
1568 t = build_int_2 (low, high);
1569 }
1570 TREE_TYPE (t) = type;
1571 TREE_OVERFLOW (t)
1572 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1573 TREE_CONSTANT_OVERFLOW (t)
1574 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1575 }
1576 TREE_TYPE (t) = type;
1577 }
1578 else if (TREE_CODE (type) == REAL_TYPE)
1579 {
1580 if (TREE_CODE (arg1) == INTEGER_CST)
1581 return build_real_from_int_cst (type, arg1);
1582 if (TREE_CODE (arg1) == REAL_CST)
1583 {
1584 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1585 {
1586 /* We make a copy of ARG1 so that we don't modify an
1587 existing constant tree. */
1588 t = copy_node (arg1);
1589 TREE_TYPE (t) = type;
1590 return t;
1591 }
1592
1593 t = build_real (type,
1594 real_value_truncate (TYPE_MODE (type),
1595 TREE_REAL_CST (arg1)));
1596
1597 TREE_OVERFLOW (t)
1598 = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
1599 TREE_CONSTANT_OVERFLOW (t)
1600 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1601 return t;
1602 }
1603 }
1604 TREE_CONSTANT (t) = 1;
1605 return t;
1606 }
1607 \f
1608 /* Return an expr equal to X but certainly not valid as an lvalue. */
1609
1610 tree
1611 non_lvalue (tree x)
1612 {
1613 tree result;
1614
1615 /* These things are certainly not lvalues. */
1616 if (TREE_CODE (x) == NON_LVALUE_EXPR
1617 || TREE_CODE (x) == INTEGER_CST
1618 || TREE_CODE (x) == REAL_CST
1619 || TREE_CODE (x) == STRING_CST
1620 || TREE_CODE (x) == ADDR_EXPR)
1621 return x;
1622
1623 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
1624 TREE_CONSTANT (result) = TREE_CONSTANT (x);
1625 return result;
1626 }
1627
1628 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
1629 Zero means allow extended lvalues. */
1630
1631 int pedantic_lvalues;
1632
1633 /* When pedantic, return an expr equal to X but certainly not valid as a
1634 pedantic lvalue. Otherwise, return X. */
1635
1636 tree
1637 pedantic_non_lvalue (tree x)
1638 {
1639 if (pedantic_lvalues)
1640 return non_lvalue (x);
1641 else
1642 return x;
1643 }
1644 \f
1645 /* Given a tree comparison code, return the code that is the logical inverse
1646 of the given code. It is not safe to do this for floating-point
1647 comparisons, except for NE_EXPR and EQ_EXPR. */
1648
1649 static enum tree_code
1650 invert_tree_comparison (enum tree_code code)
1651 {
1652 switch (code)
1653 {
1654 case EQ_EXPR:
1655 return NE_EXPR;
1656 case NE_EXPR:
1657 return EQ_EXPR;
1658 case GT_EXPR:
1659 return LE_EXPR;
1660 case GE_EXPR:
1661 return LT_EXPR;
1662 case LT_EXPR:
1663 return GE_EXPR;
1664 case LE_EXPR:
1665 return GT_EXPR;
1666 default:
1667 abort ();
1668 }
1669 }
1670
1671 /* Similar, but return the comparison that results if the operands are
1672 swapped. This is safe for floating-point. */
1673
1674 static enum tree_code
1675 swap_tree_comparison (enum tree_code code)
1676 {
1677 switch (code)
1678 {
1679 case EQ_EXPR:
1680 case NE_EXPR:
1681 return code;
1682 case GT_EXPR:
1683 return LT_EXPR;
1684 case GE_EXPR:
1685 return LE_EXPR;
1686 case LT_EXPR:
1687 return GT_EXPR;
1688 case LE_EXPR:
1689 return GE_EXPR;
1690 default:
1691 abort ();
1692 }
1693 }
1694
1695
1696 /* Convert a comparison tree code from an enum tree_code representation
1697 into a compcode bit-based encoding. This function is the inverse of
1698 compcode_to_comparison. */
1699
1700 static int
1701 comparison_to_compcode (enum tree_code code)
1702 {
1703 switch (code)
1704 {
1705 case LT_EXPR:
1706 return COMPCODE_LT;
1707 case EQ_EXPR:
1708 return COMPCODE_EQ;
1709 case LE_EXPR:
1710 return COMPCODE_LE;
1711 case GT_EXPR:
1712 return COMPCODE_GT;
1713 case NE_EXPR:
1714 return COMPCODE_NE;
1715 case GE_EXPR:
1716 return COMPCODE_GE;
1717 default:
1718 abort ();
1719 }
1720 }
1721
1722 /* Convert a compcode bit-based encoding of a comparison operator back
1723 to GCC's enum tree_code representation. This function is the
1724 inverse of comparison_to_compcode. */
1725
1726 static enum tree_code
1727 compcode_to_comparison (int code)
1728 {
1729 switch (code)
1730 {
1731 case COMPCODE_LT:
1732 return LT_EXPR;
1733 case COMPCODE_EQ:
1734 return EQ_EXPR;
1735 case COMPCODE_LE:
1736 return LE_EXPR;
1737 case COMPCODE_GT:
1738 return GT_EXPR;
1739 case COMPCODE_NE:
1740 return NE_EXPR;
1741 case COMPCODE_GE:
1742 return GE_EXPR;
1743 default:
1744 abort ();
1745 }
1746 }
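/* Editor's illustrative sketch, not part of the original source: the
   encoding gives "less", "equal" and "greater" one bit each (1, 2, 4),
   so combining two comparisons of the same operands is plain bitwise
   arithmetic: (a < b) || (a == b) is COMPCODE_LT | COMPCODE_EQ, which
   equals COMPCODE_LE, and (a <= b) && (a != b) is COMPCODE_LE &
   COMPCODE_NE, which equals COMPCODE_LT.  */
#if 0
#include <stdio.h>
int
main (void)
{
  enum { LT = 1, EQ = 2, LE = 3, GT = 4, NE = 5, GE = 6 };
  printf ("%d %d\n", (LT | EQ) == LE, (LE & NE) == LT);   /* prints "1 1" */
  return 0;
}
#endif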
1747
1748 /* Return nonzero if CODE is a tree code that represents a truth value. */
1749
1750 static int
1751 truth_value_p (enum tree_code code)
1752 {
1753 return (TREE_CODE_CLASS (code) == '<'
1754 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
1755 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
1756 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
1757 }
1758 \f
1759 /* Return nonzero if two operands are necessarily equal.
1760 If ONLY_CONST is nonzero, only return nonzero for constants.
1761 This function tests whether the operands are indistinguishable;
1762 it does not test whether they are equal using C's == operation.
1763 The distinction is important for IEEE floating point, because
1764 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
1765 (2) two NaNs may be indistinguishable, but NaN!=NaN. */
1766
1767 int
1768 operand_equal_p (tree arg0, tree arg1, int only_const)
1769 {
1770   /* If the two types don't have the same signedness, then we can't consider
1771 them equal. We must check this before the STRIP_NOPS calls
1772 because they may change the signedness of the arguments. */
1773 if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
1774 return 0;
1775
1776 STRIP_NOPS (arg0);
1777 STRIP_NOPS (arg1);
1778
1779 if (TREE_CODE (arg0) != TREE_CODE (arg1)
1780 /* This is needed for conversions and for COMPONENT_REF.
1781 Might as well play it safe and always test this. */
1782 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
1783 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
1784 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
1785 return 0;
1786
1787 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
1788 We don't care about side effects in that case because the SAVE_EXPR
1789 takes care of that for us. In all other cases, two expressions are
1790 equal if they have no side effects. If we have two identical
1791 expressions with side effects that should be treated the same due
1792 to the only side effects being identical SAVE_EXPR's, that will
1793 be detected in the recursive calls below. */
1794 if (arg0 == arg1 && ! only_const
1795 && (TREE_CODE (arg0) == SAVE_EXPR
1796 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
1797 return 1;
1798
1799 /* Next handle constant cases, those for which we can return 1 even
1800 if ONLY_CONST is set. */
1801 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
1802 switch (TREE_CODE (arg0))
1803 {
1804 case INTEGER_CST:
1805 return (! TREE_CONSTANT_OVERFLOW (arg0)
1806 && ! TREE_CONSTANT_OVERFLOW (arg1)
1807 && tree_int_cst_equal (arg0, arg1));
1808
1809 case REAL_CST:
1810 return (! TREE_CONSTANT_OVERFLOW (arg0)
1811 && ! TREE_CONSTANT_OVERFLOW (arg1)
1812 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
1813 TREE_REAL_CST (arg1)));
1814
1815 case VECTOR_CST:
1816 {
1817 tree v1, v2;
1818
1819 if (TREE_CONSTANT_OVERFLOW (arg0)
1820 || TREE_CONSTANT_OVERFLOW (arg1))
1821 return 0;
1822
1823 v1 = TREE_VECTOR_CST_ELTS (arg0);
1824 v2 = TREE_VECTOR_CST_ELTS (arg1);
1825 while (v1 && v2)
1826 {
1827 if (!operand_equal_p (v1, v2, only_const))
1828 return 0;
1829 v1 = TREE_CHAIN (v1);
1830 v2 = TREE_CHAIN (v2);
1831 }
1832
1833 return 1;
1834 }
1835
1836 case COMPLEX_CST:
1837 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
1838 only_const)
1839 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
1840 only_const));
1841
1842 case STRING_CST:
1843 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
1844 && ! memcmp (TREE_STRING_POINTER (arg0),
1845 TREE_STRING_POINTER (arg1),
1846 TREE_STRING_LENGTH (arg0)));
1847
1848 case ADDR_EXPR:
1849 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
1850 0);
1851 default:
1852 break;
1853 }
1854
1855 if (only_const)
1856 return 0;
1857
1858 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
1859 {
1860 case '1':
1861 /* Two conversions are equal only if signedness and modes match. */
1862 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
1863 && (TREE_UNSIGNED (TREE_TYPE (arg0))
1864 != TREE_UNSIGNED (TREE_TYPE (arg1))))
1865 return 0;
1866
1867 return operand_equal_p (TREE_OPERAND (arg0, 0),
1868 TREE_OPERAND (arg1, 0), 0);
1869
1870 case '<':
1871 case '2':
1872 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
1873 && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
1874 0))
1875 return 1;
1876
1877 /* For commutative ops, allow the other order. */
1878 return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
1879 || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
1880 || TREE_CODE (arg0) == BIT_IOR_EXPR
1881 || TREE_CODE (arg0) == BIT_XOR_EXPR
1882 || TREE_CODE (arg0) == BIT_AND_EXPR
1883 || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
1884 && operand_equal_p (TREE_OPERAND (arg0, 0),
1885 TREE_OPERAND (arg1, 1), 0)
1886 && operand_equal_p (TREE_OPERAND (arg0, 1),
1887 TREE_OPERAND (arg1, 0), 0));
1888
1889 case 'r':
1890 /* If either of the pointer (or reference) expressions we are
1891 dereferencing contain a side effect, these cannot be equal. */
1892 if (TREE_SIDE_EFFECTS (arg0)
1893 || TREE_SIDE_EFFECTS (arg1))
1894 return 0;
1895
1896 switch (TREE_CODE (arg0))
1897 {
1898 case INDIRECT_REF:
1899 return operand_equal_p (TREE_OPERAND (arg0, 0),
1900 TREE_OPERAND (arg1, 0), 0);
1901
1902 case COMPONENT_REF:
1903 case ARRAY_REF:
1904 case ARRAY_RANGE_REF:
1905 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1906 TREE_OPERAND (arg1, 0), 0)
1907 && operand_equal_p (TREE_OPERAND (arg0, 1),
1908 TREE_OPERAND (arg1, 1), 0));
1909
1910 case BIT_FIELD_REF:
1911 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1912 TREE_OPERAND (arg1, 0), 0)
1913 && operand_equal_p (TREE_OPERAND (arg0, 1),
1914 TREE_OPERAND (arg1, 1), 0)
1915 && operand_equal_p (TREE_OPERAND (arg0, 2),
1916 TREE_OPERAND (arg1, 2), 0));
1917 default:
1918 return 0;
1919 }
1920
1921 case 'e':
1922 switch (TREE_CODE (arg0))
1923 {
1924 case ADDR_EXPR:
1925 case TRUTH_NOT_EXPR:
1926 return operand_equal_p (TREE_OPERAND (arg0, 0),
1927 TREE_OPERAND (arg1, 0), 0);
1928
1929 case RTL_EXPR:
1930 return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
1931
1932 case CALL_EXPR:
1933 /* If the CALL_EXPRs call different functions, then they
1934 clearly cannot be equal. */
1935 if (! operand_equal_p (TREE_OPERAND (arg0, 0),
1936 TREE_OPERAND (arg1, 0), 0))
1937 return 0;
1938
1939 /* Only consider const functions equivalent. */
1940 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR)
1941 {
1942 tree fndecl = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
1943 if (! (flags_from_decl_or_type (fndecl) & ECF_CONST))
1944 return 0;
1945 }
1946 else
1947 return 0;
1948
1949 /* Now see if all the arguments are the same. operand_equal_p
1950 does not handle TREE_LIST, so we walk the operands here
1951 feeding them to operand_equal_p. */
1952 arg0 = TREE_OPERAND (arg0, 1);
1953 arg1 = TREE_OPERAND (arg1, 1);
1954 while (arg0 && arg1)
1955 {
1956 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1), 0))
1957 return 0;
1958
1959 arg0 = TREE_CHAIN (arg0);
1960 arg1 = TREE_CHAIN (arg1);
1961 }
1962
1963 /* If we get here and both argument lists are exhausted
1964 then the CALL_EXPRs are equal. */
1965 return ! (arg0 || arg1);
1966
1967 default:
1968 return 0;
1969 }
1970
1971 case 'd':
1972 /* Consider __builtin_sqrt equal to sqrt. */
1973 return TREE_CODE (arg0) == FUNCTION_DECL
1974 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
1975 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
1976 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1);
1977
1978 default:
1979 return 0;
1980 }
1981 }
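/* An illustrative aside (hypothetical sketch, deliberately excluded from
   the build): the IEEE distinction drawn in the comment above can be
   reproduced in plain C. */
#if 0
#include <assert.h>
#include <math.h>

static void
ieee_distinguishability_example (void)
{
  double pz = 0.0, nz = -0.0;
  double n = nan ("");

  /* C's == cannot tell the two zeros apart...  */
  assert (pz == nz);
  /* ...yet they are distinguishable, e.g. by their sign bit.  */
  assert (signbit (pz) != signbit (nz));
  /* Two NaNs may be indistinguishable, yet NaN != NaN under ==.  */
  assert (n != n);
}
#endif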
1982 \f
1983 /* Similar to operand_equal_p, but see if ARG0 might have been made by
1984 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
1985
1986 When in doubt, return 0. */
1987
1988 static int
1989 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
1990 {
1991 int unsignedp1, unsignedpo;
1992 tree primarg0, primarg1, primother;
1993 unsigned int correct_width;
1994
1995 if (operand_equal_p (arg0, arg1, 0))
1996 return 1;
1997
1998 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
1999 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2000 return 0;
2001
2002 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2003 and see if the inner values are the same. This removes any
2004 signedness comparison, which doesn't matter here. */
2005 primarg0 = arg0, primarg1 = arg1;
2006 STRIP_NOPS (primarg0);
2007 STRIP_NOPS (primarg1);
2008 if (operand_equal_p (primarg0, primarg1, 0))
2009 return 1;
2010
2011 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2012 actual comparison operand, ARG0.
2013
2014 First throw away any conversions to wider types
2015 already present in the operands. */
2016
2017 primarg1 = get_narrower (arg1, &unsignedp1);
2018 primother = get_narrower (other, &unsignedpo);
2019
2020 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2021 if (unsignedp1 == unsignedpo
2022 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2023 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2024 {
2025 tree type = TREE_TYPE (arg0);
2026
2027 /* Make sure shorter operand is extended the right way
2028 to match the longer operand. */
2029 primarg1 = convert ((*lang_hooks.types.signed_or_unsigned_type)
2030 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2031
2032 if (operand_equal_p (arg0, convert (type, primarg1), 0))
2033 return 1;
2034 }
2035
2036 return 0;
2037 }
2038 \f
2039 /* See if ARG is an expression that is either a comparison or is performing
2040 arithmetic on comparisons. The comparisons must only be comparing
2041 two different values, which will be stored in *CVAL1 and *CVAL2; if
2042 they are nonzero it means that some operands have already been found.
2043 No variables may be used anywhere else in the expression except in the
2044 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2045 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2046
2047 If this is true, return 1. Otherwise, return zero. */
2048
2049 static int
2050 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2051 {
2052 enum tree_code code = TREE_CODE (arg);
2053 char class = TREE_CODE_CLASS (code);
2054
2055 /* We can handle some of the 'e' cases here. */
2056 if (class == 'e' && code == TRUTH_NOT_EXPR)
2057 class = '1';
2058 else if (class == 'e'
2059 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2060 || code == COMPOUND_EXPR))
2061 class = '2';
2062
2063 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
2064 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2065 {
2066 /* If we've already found a CVAL1 or CVAL2, this expression is
2067 too complex to handle. */
2068 if (*cval1 || *cval2)
2069 return 0;
2070
2071 class = '1';
2072 *save_p = 1;
2073 }
2074
2075 switch (class)
2076 {
2077 case '1':
2078 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2079
2080 case '2':
2081 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2082 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2083 cval1, cval2, save_p));
2084
2085 case 'c':
2086 return 1;
2087
2088 case 'e':
2089 if (code == COND_EXPR)
2090 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2091 cval1, cval2, save_p)
2092 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2093 cval1, cval2, save_p)
2094 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2095 cval1, cval2, save_p));
2096 return 0;
2097
2098 case '<':
2099 /* First see if we can handle the first operand, then the second. For
2100 the second operand, we know *CVAL1 can't be zero. It must be that
2101 one side of the comparison is each of the values; test for the
2102 case where this isn't true by failing if the two operands
2103 are the same. */
2104
2105 if (operand_equal_p (TREE_OPERAND (arg, 0),
2106 TREE_OPERAND (arg, 1), 0))
2107 return 0;
2108
2109 if (*cval1 == 0)
2110 *cval1 = TREE_OPERAND (arg, 0);
2111 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2112 ;
2113 else if (*cval2 == 0)
2114 *cval2 = TREE_OPERAND (arg, 0);
2115 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2116 ;
2117 else
2118 return 0;
2119
2120 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2121 ;
2122 else if (*cval2 == 0)
2123 *cval2 = TREE_OPERAND (arg, 1);
2124 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2125 ;
2126 else
2127 return 0;
2128
2129 return 1;
2130
2131 default:
2132 return 0;
2133 }
2134 }
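/* For example (illustrative only, with x, y and z as placeholder
   operands): given ARG = (x < y) || (x == y), the first comparison
   records *CVAL1 = x and *CVAL2 = y, and the second comparison matches
   the same two operands, so we return 1. An expression such as
   (x < y) || (x == z) fails, since it mentions three distinct values. */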
2135 \f
2136 /* ARG is a tree that is known to contain just arithmetic operations and
2137 comparisons. Evaluate the operations in the tree substituting NEW0 for
2138 any occurrence of OLD0 as an operand of a comparison and likewise for
2139 NEW1 and OLD1. */
2140
2141 static tree
2142 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2143 {
2144 tree type = TREE_TYPE (arg);
2145 enum tree_code code = TREE_CODE (arg);
2146 char class = TREE_CODE_CLASS (code);
2147
2148 /* We can handle some of the 'e' cases here. */
2149 if (class == 'e' && code == TRUTH_NOT_EXPR)
2150 class = '1';
2151 else if (class == 'e'
2152 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2153 class = '2';
2154
2155 switch (class)
2156 {
2157 case '1':
2158 return fold (build1 (code, type,
2159 eval_subst (TREE_OPERAND (arg, 0),
2160 old0, new0, old1, new1)));
2161
2162 case '2':
2163 return fold (build (code, type,
2164 eval_subst (TREE_OPERAND (arg, 0),
2165 old0, new0, old1, new1),
2166 eval_subst (TREE_OPERAND (arg, 1),
2167 old0, new0, old1, new1)));
2168
2169 case 'e':
2170 switch (code)
2171 {
2172 case SAVE_EXPR:
2173 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2174
2175 case COMPOUND_EXPR:
2176 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2177
2178 case COND_EXPR:
2179 return fold (build (code, type,
2180 eval_subst (TREE_OPERAND (arg, 0),
2181 old0, new0, old1, new1),
2182 eval_subst (TREE_OPERAND (arg, 1),
2183 old0, new0, old1, new1),
2184 eval_subst (TREE_OPERAND (arg, 2),
2185 old0, new0, old1, new1)));
2186 default:
2187 break;
2188 }
2189 /* fall through - ??? */
2190
2191 case '<':
2192 {
2193 tree arg0 = TREE_OPERAND (arg, 0);
2194 tree arg1 = TREE_OPERAND (arg, 1);
2195
2196 /* We need to check both for exact equality and tree equality. The
2197 former will be true if the operand has a side-effect. In that
2198 case, we know the operand occurred exactly once. */
2199
2200 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2201 arg0 = new0;
2202 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2203 arg0 = new1;
2204
2205 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2206 arg1 = new0;
2207 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2208 arg1 = new1;
2209
2210 return fold (build (code, type, arg0, arg1));
2211 }
2212
2213 default:
2214 return arg;
2215 }
2216 }
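/* A worked example (illustrative, with placeholder names): given
   ARG = (a < b ? a == b : a > b), the call
     eval_subst (arg, a, x, b, y)
   descends through the COND_EXPR case and each comparison leaf,
   producing the folded equivalent of (x < y ? x == y : x > y). */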
2217 \f
2218 /* Return a tree for the case when the result of an expression is RESULT
2219 converted to TYPE and OMITTED was previously an operand of the expression
2220 but is now not needed (e.g., we folded OMITTED * 0).
2221
2222 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2223 the conversion of RESULT to TYPE. */
2224
2225 tree
2226 omit_one_operand (tree type, tree result, tree omitted)
2227 {
2228 tree t = convert (type, result);
2229
2230 if (TREE_SIDE_EFFECTS (omitted))
2231 return build (COMPOUND_EXPR, type, omitted, t);
2232
2233 return non_lvalue (t);
2234 }
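/* For example (illustrative): when folding f () * 0, the call cannot
   simply be dropped if it has side effects, so
     omit_one_operand (type, integer_zero_node, <call to f>)
   yields the COMPOUND_EXPR (f (), 0), which still evaluates f ().
   For a side-effect-free OMITTED, the result is just the constant 0
   wrapped as a non-lvalue. */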
2235
2236 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2237
2238 static tree
2239 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2240 {
2241 tree t = convert (type, result);
2242
2243 if (TREE_SIDE_EFFECTS (omitted))
2244 return build (COMPOUND_EXPR, type, omitted, t);
2245
2246 return pedantic_non_lvalue (t);
2247 }
2248 \f
2249 /* Return a simplified tree node for the truth-negation of ARG. This
2250 never alters ARG itself. We assume that ARG is an operation that
2251 returns a truth value (0 or 1). */
2252
2253 tree
2254 invert_truthvalue (tree arg)
2255 {
2256 tree type = TREE_TYPE (arg);
2257 enum tree_code code = TREE_CODE (arg);
2258
2259 if (code == ERROR_MARK)
2260 return arg;
2261
2262 /* If this is a comparison, we can simply invert it, except for
2263 floating-point non-equality comparisons, in which case we just
2264 enclose a TRUTH_NOT_EXPR around what we have. */
2265
2266 if (TREE_CODE_CLASS (code) == '<')
2267 {
2268 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2269 && !flag_unsafe_math_optimizations
2270 && code != NE_EXPR
2271 && code != EQ_EXPR)
2272 return build1 (TRUTH_NOT_EXPR, type, arg);
2273 else
2274 return build (invert_tree_comparison (code), type,
2275 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2276 }
2277
2278 switch (code)
2279 {
2280 case INTEGER_CST:
2281 return convert (type, build_int_2 (integer_zerop (arg), 0));
2282
2283 case TRUTH_AND_EXPR:
2284 return build (TRUTH_OR_EXPR, type,
2285 invert_truthvalue (TREE_OPERAND (arg, 0)),
2286 invert_truthvalue (TREE_OPERAND (arg, 1)));
2287
2288 case TRUTH_OR_EXPR:
2289 return build (TRUTH_AND_EXPR, type,
2290 invert_truthvalue (TREE_OPERAND (arg, 0)),
2291 invert_truthvalue (TREE_OPERAND (arg, 1)));
2292
2293 case TRUTH_XOR_EXPR:
2294 /* Here we can invert either operand. We invert the first operand
2295 unless the second operand is a TRUTH_NOT_EXPR in which case our
2296 result is the XOR of the first operand with the inside of the
2297 negation of the second operand. */
2298
2299 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2300 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2301 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2302 else
2303 return build (TRUTH_XOR_EXPR, type,
2304 invert_truthvalue (TREE_OPERAND (arg, 0)),
2305 TREE_OPERAND (arg, 1));
2306
2307 case TRUTH_ANDIF_EXPR:
2308 return build (TRUTH_ORIF_EXPR, type,
2309 invert_truthvalue (TREE_OPERAND (arg, 0)),
2310 invert_truthvalue (TREE_OPERAND (arg, 1)));
2311
2312 case TRUTH_ORIF_EXPR:
2313 return build (TRUTH_ANDIF_EXPR, type,
2314 invert_truthvalue (TREE_OPERAND (arg, 0)),
2315 invert_truthvalue (TREE_OPERAND (arg, 1)));
2316
2317 case TRUTH_NOT_EXPR:
2318 return TREE_OPERAND (arg, 0);
2319
2320 case COND_EXPR:
2321 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2322 invert_truthvalue (TREE_OPERAND (arg, 1)),
2323 invert_truthvalue (TREE_OPERAND (arg, 2)));
2324
2325 case COMPOUND_EXPR:
2326 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2327 invert_truthvalue (TREE_OPERAND (arg, 1)));
2328
2329 case WITH_RECORD_EXPR:
2330 return build (WITH_RECORD_EXPR, type,
2331 invert_truthvalue (TREE_OPERAND (arg, 0)),
2332 TREE_OPERAND (arg, 1));
2333
2334 case NON_LVALUE_EXPR:
2335 return invert_truthvalue (TREE_OPERAND (arg, 0));
2336
2337 case NOP_EXPR:
2338 case CONVERT_EXPR:
2339 case FLOAT_EXPR:
2340 return build1 (TREE_CODE (arg), type,
2341 invert_truthvalue (TREE_OPERAND (arg, 0)));
2342
2343 case BIT_AND_EXPR:
2344 if (!integer_onep (TREE_OPERAND (arg, 1)))
2345 break;
2346 return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));
2347
2348 case SAVE_EXPR:
2349 return build1 (TRUTH_NOT_EXPR, type, arg);
2350
2351 case CLEANUP_POINT_EXPR:
2352 return build1 (CLEANUP_POINT_EXPR, type,
2353 invert_truthvalue (TREE_OPERAND (arg, 0)));
2354
2355 default:
2356 break;
2357 }
2358 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2359 abort ();
2360 return build1 (TRUTH_NOT_EXPR, type, arg);
2361 }
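/* A worked example (illustrative): inverting (a && b) || c walks the
   cases above top-down:
     !((a && b) || c)
       ==> !(a && b) && !c      (TRUTH_ORIF_EXPR case)
       ==> (!a || !b) && !c     (TRUTH_ANDIF_EXPR case)
   Comparisons invert directly, e.g. !(x < y) becomes x >= y, except
   for the ordered floating-point comparisons noted above, which keep
   an explicit TRUTH_NOT_EXPR because of NaNs. */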
2362
2363 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2364 operands are another bit-wise operation with a common input. If so,
2365 distribute the bit operations to save an operation and possibly two if
2366 constants are involved. For example, convert
2367 (A | B) & (A | C) into A | (B & C)
2368 Further simplification will occur if B and C are constants.
2369
2370 If this optimization cannot be done, 0 will be returned. */
2371
2372 static tree
2373 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
2374 {
2375 tree common;
2376 tree left, right;
2377
2378 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2379 || TREE_CODE (arg0) == code
2380 || (TREE_CODE (arg0) != BIT_AND_EXPR
2381 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2382 return 0;
2383
2384 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2385 {
2386 common = TREE_OPERAND (arg0, 0);
2387 left = TREE_OPERAND (arg0, 1);
2388 right = TREE_OPERAND (arg1, 1);
2389 }
2390 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2391 {
2392 common = TREE_OPERAND (arg0, 0);
2393 left = TREE_OPERAND (arg0, 1);
2394 right = TREE_OPERAND (arg1, 0);
2395 }
2396 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2397 {
2398 common = TREE_OPERAND (arg0, 1);
2399 left = TREE_OPERAND (arg0, 0);
2400 right = TREE_OPERAND (arg1, 1);
2401 }
2402 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2403 {
2404 common = TREE_OPERAND (arg0, 1);
2405 left = TREE_OPERAND (arg0, 0);
2406 right = TREE_OPERAND (arg1, 0);
2407 }
2408 else
2409 return 0;
2410
2411 return fold (build (TREE_CODE (arg0), type, common,
2412 fold (build (code, type, left, right))));
2413 }
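/* A concrete instance (illustrative): (x | 3) & (x | 5) matches the
   first operand_equal_p case with COMMON = x, LEFT = 3 and RIGHT = 5,
   so we build x | (3 & 5), and the inner fold reduces that to x | 1 --
   one bit operation instead of three. */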
2414 \f
2415 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2416 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
2417
2418 static tree
2419 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos, int unsignedp)
2420 {
2421 tree result = build (BIT_FIELD_REF, type, inner,
2422 size_int (bitsize), bitsize_int (bitpos));
2423
2424 TREE_UNSIGNED (result) = unsignedp;
2425
2426 return result;
2427 }
2428
2429 /* Optimize a bit-field compare.
2430
2431 There are two cases: the first is a compare against a constant and the
2432 second is a comparison of two items where the fields are at the same
2433 bit position relative to the start of a chunk (byte, halfword, word)
2434 large enough to contain it. In these cases we can avoid the shift
2435 implicit in bitfield extractions.
2436
2437 For constants, we emit a compare of the shifted constant with the
2438 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2439 compared. For two fields at the same position, we do the ANDs with the
2440 similar mask and compare the result of the ANDs.
2441
2442 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2443 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2444 are the left and right operands of the comparison, respectively.
2445
2446 If the optimization described above can be done, we return the resulting
2447 tree. Otherwise we return zero. */
2448
2449 static tree
2450 optimize_bit_field_compare (enum tree_code code, tree compare_type, tree lhs, tree rhs)
2451 {
2452 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
2453 tree type = TREE_TYPE (lhs);
2454 tree signed_type, unsigned_type;
2455 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2456 enum machine_mode lmode, rmode, nmode;
2457 int lunsignedp, runsignedp;
2458 int lvolatilep = 0, rvolatilep = 0;
2459 tree linner, rinner = NULL_TREE;
2460 tree mask;
2461 tree offset;
2462
2463 /* Get all the information about the extractions being done. If the bit size
2464 is the same as the size of the underlying object, we aren't doing an
2465 extraction at all and so can do nothing. We also don't want to
2466 do anything if the inner expression is a PLACEHOLDER_EXPR since we
2467 then will no longer be able to replace it. */
2468 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2469 &lunsignedp, &lvolatilep);
2470 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2471 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
2472 return 0;
2473
2474 if (!const_p)
2475 {
2476 /* If this is not a constant, we can only do something if bit positions,
2477 sizes, and signedness are the same. */
2478 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
2479 &runsignedp, &rvolatilep);
2480
2481 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2482 || lunsignedp != runsignedp || offset != 0
2483 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
2484 return 0;
2485 }
2486
2487 /* See if we can find a mode to refer to this field. We should be able to,
2488 but fail if we can't. */
2489 nmode = get_best_mode (lbitsize, lbitpos,
2490 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
2491 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
2492 TYPE_ALIGN (TREE_TYPE (rinner))),
2493 word_mode, lvolatilep || rvolatilep);
2494 if (nmode == VOIDmode)
2495 return 0;
2496
2497 /* Set signed and unsigned types of the precision of this mode for the
2498 shifts below. */
2499 signed_type = (*lang_hooks.types.type_for_mode) (nmode, 0);
2500 unsigned_type = (*lang_hooks.types.type_for_mode) (nmode, 1);
2501
2502 /* Compute the bit position and size for the new reference and our offset
2503 within it. If the new reference is the same size as the original, we
2504 won't optimize anything, so return zero. */
2505 nbitsize = GET_MODE_BITSIZE (nmode);
2506 nbitpos = lbitpos & ~ (nbitsize - 1);
2507 lbitpos -= nbitpos;
2508 if (nbitsize == lbitsize)
2509 return 0;
2510
2511 if (BYTES_BIG_ENDIAN)
2512 lbitpos = nbitsize - lbitsize - lbitpos;
2513
2514 /* Make the mask to be used against the extracted field. */
2515 mask = build_int_2 (~0, ~0);
2516 TREE_TYPE (mask) = unsigned_type;
2517 force_fit_type (mask, 0);
2518 mask = convert (unsigned_type, mask);
2519 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
2520 mask = const_binop (RSHIFT_EXPR, mask,
2521 size_int (nbitsize - lbitsize - lbitpos), 0);
2522
2523 if (! const_p)
2524 /* If not comparing with constant, just rework the comparison
2525 and return. */
2526 return build (code, compare_type,
2527 build (BIT_AND_EXPR, unsigned_type,
2528 make_bit_field_ref (linner, unsigned_type,
2529 nbitsize, nbitpos, 1),
2530 mask),
2531 build (BIT_AND_EXPR, unsigned_type,
2532 make_bit_field_ref (rinner, unsigned_type,
2533 nbitsize, nbitpos, 1),
2534 mask));
2535
2536 /* Otherwise, we are handling the constant case. See if the constant is too
2537 big for the field. Warn and return a tree for 0 (false) if so. We do
2538 this not only for its own sake, but to avoid having to test for this
2539 error case below. If we didn't, we might generate wrong code.
2540
2541 For unsigned fields, the constant shifted right by the field length should
2542 be all zero. For signed fields, the high-order bits should agree with
2543 the sign bit. */
2544
2545 if (lunsignedp)
2546 {
2547 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2548 convert (unsigned_type, rhs),
2549 size_int (lbitsize), 0)))
2550 {
2551 warning ("comparison is always %d due to width of bit-field",
2552 code == NE_EXPR);
2553 return convert (compare_type,
2554 (code == NE_EXPR
2555 ? integer_one_node : integer_zero_node));
2556 }
2557 }
2558 else
2559 {
2560 tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
2561 size_int (lbitsize - 1), 0);
2562 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2563 {
2564 warning ("comparison is always %d due to width of bit-field",
2565 code == NE_EXPR);
2566 return convert (compare_type,
2567 (code == NE_EXPR
2568 ? integer_one_node : integer_zero_node));
2569 }
2570 }
2571
2572 /* Single-bit compares should always be against zero. */
2573 if (lbitsize == 1 && ! integer_zerop (rhs))
2574 {
2575 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2576 rhs = convert (type, integer_zero_node);
2577 }
2578
2579 /* Make a new bitfield reference, shift the constant over the
2580 appropriate number of bits and mask it with the computed mask
2581 (in case this was a signed field). If we changed it, make a new one. */
2582 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
2583 if (lvolatilep)
2584 {
2585 TREE_SIDE_EFFECTS (lhs) = 1;
2586 TREE_THIS_VOLATILE (lhs) = 1;
2587 }
2588
2589 rhs = fold (const_binop (BIT_AND_EXPR,
2590 const_binop (LSHIFT_EXPR,
2591 convert (unsigned_type, rhs),
2592 size_int (lbitpos), 0),
2593 mask, 0));
2594
2595 return build (code, compare_type,
2596 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
2597 rhs);
2598 }
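/* A hypothetical example of the constant case: given

     struct s { unsigned int f : 3; unsigned int g : 5; } *p;

   the test p->f == 5 can be rewritten as, roughly,
     (<word containing f> & mask) == (5 << lbitpos)
   where the mask is the 3-bit field mask shifted into place -- a load,
   an AND and a compare, with no extraction shift. The exact word, mask
   and shift come from the nbitsize/nbitpos computation above and depend
   on the target's endianness and alignment. */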
2599 \f
2600 /* Subroutine for fold_truthop: decode a field reference.
2601
2602 If EXP is a comparison reference, we return the innermost reference.
2603
2604 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2605 set to the starting bit number.
2606
2607 If the innermost field can be completely contained in a mode-sized
2608 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
2609
2610 *PVOLATILEP is set to 1 if any expression encountered is volatile;
2611 otherwise it is not changed.
2612
2613 *PUNSIGNEDP is set to the signedness of the field.
2614
2615 *PMASK is set to the mask used. This is either contained in a
2616 BIT_AND_EXPR or derived from the width of the field.
2617
2618 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
2619
2620 Return 0 if this is not a component reference or is one that we can't
2621 do anything with. */
2622
2623 static tree
2624 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize, HOST_WIDE_INT *pbitpos,
2625 enum machine_mode *pmode, int *punsignedp, int *pvolatilep,
2626 tree *pmask, tree *pand_mask)
2627 {
2628 tree outer_type = 0;
2629 tree and_mask = 0;
2630 tree mask, inner, offset;
2631 tree unsigned_type;
2632 unsigned int precision;
2633
2634 /* All the optimizations using this function assume integer fields.
2635 There are problems with FP fields since the type_for_size call
2636 below can fail for, e.g., XFmode. */
2637 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
2638 return 0;
2639
2640 /* We are interested in the bare arrangement of bits, so strip everything
2641 that doesn't affect the machine mode. However, record the type of the
2642 outermost expression if it may matter below. */
2643 if (TREE_CODE (exp) == NOP_EXPR
2644 || TREE_CODE (exp) == CONVERT_EXPR
2645 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2646 outer_type = TREE_TYPE (exp);
2647 STRIP_NOPS (exp);
2648
2649 if (TREE_CODE (exp) == BIT_AND_EXPR)
2650 {
2651 and_mask = TREE_OPERAND (exp, 1);
2652 exp = TREE_OPERAND (exp, 0);
2653 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
2654 if (TREE_CODE (and_mask) != INTEGER_CST)
2655 return 0;
2656 }
2657
2658 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
2659 punsignedp, pvolatilep);
2660 if ((inner == exp && and_mask == 0)
2661 || *pbitsize < 0 || offset != 0
2662 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
2663 return 0;
2664
2665 /* If the number of bits in the reference is the same as the bitsize of
2666 the outer type, then the outer type gives the signedness. Otherwise
2667 (in case of a small bitfield) the signedness is unchanged. */
2668 if (outer_type && *pbitsize == tree_low_cst (TYPE_SIZE (outer_type), 1))
2669 *punsignedp = TREE_UNSIGNED (outer_type);
2670
2671 /* Compute the mask to access the bitfield. */
2672 unsigned_type = (*lang_hooks.types.type_for_size) (*pbitsize, 1);
2673 precision = TYPE_PRECISION (unsigned_type);
2674
2675 mask = build_int_2 (~0, ~0);
2676 TREE_TYPE (mask) = unsigned_type;
2677 force_fit_type (mask, 0);
2678 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2679 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2680
2681 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
2682 if (and_mask != 0)
2683 mask = fold (build (BIT_AND_EXPR, unsigned_type,
2684 convert (unsigned_type, and_mask), mask));
2685
2686 *pmask = mask;
2687 *pand_mask = and_mask;
2688 return inner;
2689 }
2690
2691 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
2692 bit positions. */
2693
2694 static int
2695 all_ones_mask_p (tree mask, int size)
2696 {
2697 tree type = TREE_TYPE (mask);
2698 unsigned int precision = TYPE_PRECISION (type);
2699 tree tmask;
2700
2701 tmask = build_int_2 (~0, ~0);
2702 TREE_TYPE (tmask) = (*lang_hooks.types.signed_type) (type);
2703 force_fit_type (tmask, 0);
2704 return
2705 tree_int_cst_equal (mask,
2706 const_binop (RSHIFT_EXPR,
2707 const_binop (LSHIFT_EXPR, tmask,
2708 size_int (precision - size),
2709 0),
2710 size_int (precision - size), 0));
2711 }
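/* For instance (illustrative, value level only): in a 32-bit type with
   SIZE == 8, this asks whether MASK is exactly 0xff; masks such as
   0xfe (a low-order bit missing) or 0x1ff (a bit beyond the field)
   should not qualify. */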
2712
2713 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
2714 represents the sign bit of EXP's type. If EXP represents a sign
2715 or zero extension, also test VAL against the unextended type.
2716 The return value is the (sub)expression whose sign bit is VAL,
2717 or NULL_TREE otherwise. */
2718
2719 static tree
2720 sign_bit_p (tree exp, tree val)
2721 {
2722 unsigned HOST_WIDE_INT lo;
2723 HOST_WIDE_INT hi;
2724 int width;
2725 tree t;
2726
2727 /* Tree EXP must have an integral type. */
2728 t = TREE_TYPE (exp);
2729 if (! INTEGRAL_TYPE_P (t))
2730 return NULL_TREE;
2731
2732 /* Tree VAL must be an integer constant. */
2733 if (TREE_CODE (val) != INTEGER_CST
2734 || TREE_CONSTANT_OVERFLOW (val))
2735 return NULL_TREE;
2736
2737 width = TYPE_PRECISION (t);
2738 if (width > HOST_BITS_PER_WIDE_INT)
2739 {
2740 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
2741 lo = 0;
2742 }
2743 else
2744 {
2745 hi = 0;
2746 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
2747 }
2748
2749 if (TREE_INT_CST_HIGH (val) == hi && TREE_INT_CST_LOW (val) == lo)
2750 return exp;
2751
2752 /* Handle extension from a narrower type. */
2753 if (TREE_CODE (exp) == NOP_EXPR
2754 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
2755 return sign_bit_p (TREE_OPERAND (exp, 0), val);
2756
2757 return NULL_TREE;
2758 }
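/* Illustrative values: for a 32-bit type on a host with 64-bit
   HOST_WIDE_INT, the sign bit is lo == 0x80000000, hi == 0, while a
   width greater than HOST_BITS_PER_WIDE_INT puts the bit in HI instead.
   A hypothetical standalone sketch of the same hi/lo split (unused,
   mirroring the code above): */
#if 0
static void
example_sign_bit (int width, unsigned HOST_WIDE_INT *lo, HOST_WIDE_INT *hi)
{
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      /* The sign bit lands in the upper word.  */
      *hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      *lo = 0;
    }
  else
    {
      /* The sign bit fits in the lower word.  */
      *hi = 0;
      *lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
    }
}
#endif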
2759
2760 /* Subroutine for fold_truthop: determine if an operand is simple enough
2761 to be evaluated unconditionally. */
2762
2763 static int
2764 simple_operand_p (tree exp)
2765 {
2766 /* Strip any conversions that don't change the machine mode. */
2767 while ((TREE_CODE (exp) == NOP_EXPR
2768 || TREE_CODE (exp) == CONVERT_EXPR)
2769 && (TYPE_MODE (TREE_TYPE (exp))
2770 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
2771 exp = TREE_OPERAND (exp, 0);
2772
2773 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
2774 || (DECL_P (exp)
2775 && ! TREE_ADDRESSABLE (exp)
2776 && ! TREE_THIS_VOLATILE (exp)
2777 && ! DECL_NONLOCAL (exp)
2778 /* Don't regard global variables as simple. They may be
2779 allocated in ways unknown to the compiler (shared memory,
2780 #pragma weak, etc). */
2781 && ! TREE_PUBLIC (exp)
2782 && ! DECL_EXTERNAL (exp)
2783 /* Loading a static variable is unduly expensive, but global
2784 registers aren't expensive. */
2785 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
2786 }
2787 \f
2788 /* The following functions are subroutines to fold_range_test and allow it to
2789 try to change a logical combination of comparisons into a range test.
2790
2791 For example, both
2792 X == 2 || X == 3 || X == 4 || X == 5
2793 and
2794 X >= 2 && X <= 5
2795 are converted to
2796 (unsigned) (X - 2) <= 3
2797
2798 We describe each set of comparisons as being either inside or outside
2799 a range, using a variable named like IN_P, and then describe the
2800 range with a lower and upper bound. If one of the bounds is omitted,
2801 it represents either the highest or lowest value of the type.
2802
2803 In the comments below, we represent a range by two numbers in brackets
2804 preceded by a "+" to designate being inside that range, or a "-" to
2805 designate being outside that range, so the condition can be inverted by
2806 flipping the prefix. An omitted bound is represented by a "-". For
2807 example, "- [-, 10]" means being outside the range starting at the lowest
2808 possible value and ending at 10, in other words, being greater than 10.
2809 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
2810 always false.
2811
2812 We set up things so that the missing bounds are handled in a consistent
2813 manner so neither a missing bound nor "true" and "false" need to be
2814 handled using a special case. */
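/* As a worked instance of the example above (illustrative): from
   X >= 2 && X <= 5 we record "+ [2, 5]". Subtracting the low bound
   turns this into "+ [0, 3]" over X - 2, and since values of X below 2
   wrap around to large numbers under unsigned subtraction, the single
   comparison (unsigned) (X - 2) <= 3 tests the whole range with one
   branch. */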
2815
2816 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
2817 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
2818 and UPPER1_P are nonzero if the respective argument is an upper bound
2819 and zero for a lower. TYPE, if nonzero, is the type of the result; it
2820 must be specified for a comparison. ARG1 will be converted to ARG0's
2821 type if both are specified. */
2822
2823 static tree
2824 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p, tree arg1,
2825 int upper1_p)
2826 {
2827 tree tem;
2828 int result;
2829 int sgn0, sgn1;
2830
2831 /* If neither arg represents infinity, do the normal operation.
2832 Else, if not a comparison, return infinity. Else handle the special
2833 comparison rules. Note that most of the cases below won't occur, but
2834 are handled for consistency. */
2835
2836 if (arg0 != 0 && arg1 != 0)
2837 {
2838 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
2839 arg0, convert (TREE_TYPE (arg0), arg1)));
2840 STRIP_NOPS (tem);
2841 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
2842 }
2843
2844 if (TREE_CODE_CLASS (code) != '<')
2845 return 0;
2846
2847 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
2848 for neither. In real maths, we cannot assume open-ended ranges are
2849 the same. But, this is computer arithmetic, where numbers are finite.
2850 We can therefore represent any unbounded bound by a single value Z,
2851 Z being greater than any representable number. This permits
2852 us to treat unbounded ranges as equal. */
2853 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
2854 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
2855 switch (code)
2856 {
2857 case EQ_EXPR:
2858 result = sgn0 == sgn1;
2859 break;
2860 case NE_EXPR:
2861 result = sgn0 != sgn1;
2862 break;
2863 case LT_EXPR:
2864 result = sgn0 < sgn1;
2865 break;
2866 case LE_EXPR:
2867 result = sgn0 <= sgn1;
2868 break;
2869 case GT_EXPR:
2870 result = sgn0 > sgn1;
2871 break;
2872 case GE_EXPR:
2873 result = sgn0 >= sgn1;
2874 break;
2875 default:
2876 abort ();
2877 }
2878
2879 return convert (type, result ? integer_one_node : integer_zero_node);
2880 }
2881 \f
2882 /* Given EXP, a logical expression, set the range it is testing into
2883 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
2884 actually being tested. *PLOW and *PHIGH will be made of the same type
2885 as the returned expression. If EXP is not a comparison, we will most
2886 likely not be returning a useful value and range. */
2887
2888 static tree
2889 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
2890 {
2891 enum tree_code code;
2892 tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
2893 tree orig_type = NULL_TREE;
2894 int in_p, n_in_p;
2895 tree low, high, n_low, n_high;
2896
2897 /* Start with simply saying "EXP != 0" and then look at the code of EXP
2898 and see if we can refine the range. Some of the cases below may not
2899 happen, but it doesn't seem worth worrying about this. We "continue"
2900 the outer loop when we've changed something; otherwise we "break"
2901 the switch, which will "break" the while. */
2902
2903 in_p = 0, low = high = convert (TREE_TYPE (exp), integer_zero_node);
2904
2905 while (1)
2906 {
2907 code = TREE_CODE (exp);
2908
2909 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
2910 {
2911 if (first_rtl_op (code) > 0)
2912 arg0 = TREE_OPERAND (exp, 0);
2913 if (TREE_CODE_CLASS (code) == '<'
2914 || TREE_CODE_CLASS (code) == '1'
2915 || TREE_CODE_CLASS (code) == '2')
2916 type = TREE_TYPE (arg0);
2917 if (TREE_CODE_CLASS (code) == '2'
2918 || TREE_CODE_CLASS (code) == '<'
2919 || (TREE_CODE_CLASS (code) == 'e'
2920 && TREE_CODE_LENGTH (code) > 1))
2921 arg1 = TREE_OPERAND (exp, 1);
2922 }
2923
2924 /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
2925 lose a cast by accident. */
2926 if (type != NULL_TREE && orig_type == NULL_TREE)
2927 orig_type = type;
2928
2929 switch (code)
2930 {
2931 case TRUTH_NOT_EXPR:
2932 in_p = ! in_p, exp = arg0;
2933 continue;
2934
2935 case EQ_EXPR: case NE_EXPR:
2936 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
2937 /* We can only do something if the range is testing for zero
2938 and if the second operand is an integer constant. Note that
2939 saying something is "in" the range we make is done by
2940 complementing IN_P since it will set in the initial case of
2941 being not equal to zero; "out" is leaving it alone. */
2942 if (low == 0 || high == 0
2943 || ! integer_zerop (low) || ! integer_zerop (high)
2944 || TREE_CODE (arg1) != INTEGER_CST)
2945 break;
2946
2947 switch (code)
2948 {
2949 case NE_EXPR: /* - [c, c] */
2950 low = high = arg1;
2951 break;
2952 case EQ_EXPR: /* + [c, c] */
2953 in_p = ! in_p, low = high = arg1;
2954 break;
2955 case GT_EXPR: /* - [-, c] */
2956 low = 0, high = arg1;
2957 break;
2958 case GE_EXPR: /* + [c, -] */
2959 in_p = ! in_p, low = arg1, high = 0;
2960 break;
2961 case LT_EXPR: /* - [c, -] */
2962 low = arg1, high = 0;
2963 break;
2964 case LE_EXPR: /* + [-, c] */
2965 in_p = ! in_p, low = 0, high = arg1;
2966 break;
2967 default:
2968 abort ();
2969 }
2970
2971 exp = arg0;
2972
2973 /* If this is an unsigned comparison, we also know that EXP is
2974 greater than or equal to zero. We base the range tests we make
2975 on that fact, so we record it here so we can parse existing
2976 range tests. */
2977 if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
2978 {
2979 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
2980 1, convert (type, integer_zero_node),
2981 NULL_TREE))
2982 break;
2983
2984 in_p = n_in_p, low = n_low, high = n_high;
2985
2986 /* If the high bound is missing, but we
2987 have a low bound, reverse the range so
2988 it goes from zero to the low bound minus 1. */
2989 if (high == 0 && low)
2990 {
2991 in_p = ! in_p;
2992 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
2993 integer_one_node, 0);
2994 low = convert (type, integer_zero_node);
2995 }
2996 }
2997 continue;
2998
2999 case NEGATE_EXPR:
3000 /* (-x) IN [a,b] -> x in [-b, -a] */
3001 n_low = range_binop (MINUS_EXPR, type,
3002 convert (type, integer_zero_node), 0, high, 1);
3003 n_high = range_binop (MINUS_EXPR, type,
3004 convert (type, integer_zero_node), 0, low, 0);
3005 low = n_low, high = n_high;
3006 exp = arg0;
3007 continue;
3008
3009 case BIT_NOT_EXPR:
3010 /* ~ X -> -X - 1 */
3011 exp = build (MINUS_EXPR, type, negate_expr (arg0),
3012 convert (type, integer_one_node));
3013 continue;
3014
3015 case PLUS_EXPR: case MINUS_EXPR:
3016 if (TREE_CODE (arg1) != INTEGER_CST)
3017 break;
3018
3019 /* If EXP is signed, any overflow in the computation is undefined,
3020 so we don't worry about it so long as our computations on
3021 the bounds don't overflow. For unsigned, overflow is defined
3022 and this is exactly the right thing. */
3023 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3024 type, low, 0, arg1, 0);
3025 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3026 type, high, 1, arg1, 0);
3027 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3028 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3029 break;
3030
3031 /* Check for an unsigned range which has wrapped around the maximum
3032 value thus making n_high < n_low, and normalize it. */
3033 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3034 {
3035 low = range_binop (PLUS_EXPR, type, n_high, 0,
3036 integer_one_node, 0);
3037 high = range_binop (MINUS_EXPR, type, n_low, 0,
3038 integer_one_node, 0);
3039
3040 /* If the range is of the form +/- [ x+1, x ], we won't
3041 be able to normalize it. But then, it represents the
3042 whole range or the empty set, so make it
3043 +/- [ -, - ]. */
3044 if (tree_int_cst_equal (n_low, low)
3045 && tree_int_cst_equal (n_high, high))
3046 low = high = 0;
3047 else
3048 in_p = ! in_p;
3049 }
3050 else
3051 low = n_low, high = n_high;
3052
3053 exp = arg0;
3054 continue;
3055
3056 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3057 if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
3058 break;
3059
3060 if (! INTEGRAL_TYPE_P (type)
3061 || (low != 0 && ! int_fits_type_p (low, type))
3062 || (high != 0 && ! int_fits_type_p (high, type)))
3063 break;
3064
3065 n_low = low, n_high = high;
3066
3067 if (n_low != 0)
3068 n_low = convert (type, n_low);
3069
3070 if (n_high != 0)
3071 n_high = convert (type, n_high);
3072
3073 /* If we're converting from an unsigned to a signed type,
3074 we will be doing the comparison as unsigned. The tests above
3075 have already verified that LOW and HIGH are both positive.
3076
3077 So we have to make sure that the original unsigned value will
3078 be interpreted as positive. */
3079 if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
3080 {
3081 tree equiv_type = (*lang_hooks.types.type_for_mode)
3082 (TYPE_MODE (type), 1);
3083 tree high_positive;
3084
3085 /* A range without an upper bound is, naturally, unbounded.
3086 Since convert would have cropped a very large value, use
3087 the max value for the destination type. */
3088 high_positive
3089 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3090 : TYPE_MAX_VALUE (type);
3091
3092 if (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (exp)))
3093 high_positive = fold (build (RSHIFT_EXPR, type,
3094 convert (type, high_positive),
3095 convert (type, integer_one_node)));
3096
3097 /* If the low bound is specified, "and" the range with the
3098 range for which the original unsigned value will be
3099 positive. */
3100 if (low != 0)
3101 {
3102 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3103 1, n_low, n_high,
3104 1, convert (type, integer_zero_node),
3105 high_positive))
3106 break;
3107
3108 in_p = (n_in_p == in_p);
3109 }
3110 else
3111 {
3112 /* Otherwise, "or" the range with the range of the input
3113 that will be interpreted as negative. */
3114 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3115 0, n_low, n_high,
3116 1, convert (type, integer_zero_node),
3117 high_positive))
3118 break;
3119
3120 in_p = (in_p != n_in_p);
3121 }
3122 }
3123
3124 exp = arg0;
3125 low = n_low, high = n_high;
3126 continue;
3127
3128 default:
3129 break;
3130 }
3131
3132 break;
3133 }
3134
3135 /* If EXP is a constant, we can evaluate whether this is true or false. */
3136 if (TREE_CODE (exp) == INTEGER_CST)
3137 {
3138 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3139 exp, 0, low, 0))
3140 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3141 exp, 1, high, 1)));
3142 low = high = 0;
3143 exp = 0;
3144 }
3145
3146 *pin_p = in_p, *plow = low, *phigh = high;
3147 return exp;
3148 }
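/* A short trace (illustrative): for EXP = x + 1 > 5, the comparison
   case records "- [-, 5]" over x + 1; the PLUS_EXPR case then
   subtracts 1 from the bound, giving "- [-, 4]" over x -- i.e. the
   equivalent test x > 4. */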
3149 \f
3150 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3151 type, TYPE, return an expression to test if EXP is in (or out of, depending
3152 on IN_P) the range. */
3153
3154 static tree
3155 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3156 {
3157 tree etype = TREE_TYPE (exp);
3158 tree value;
3159
3160 if (! in_p
3161 && (0 != (value = build_range_check (type, exp, 1, low, high))))
3162 return invert_truthvalue (value);
3163
3164 if (low == 0 && high == 0)
3165 return convert (type, integer_one_node);
3166
3167 if (low == 0)
3168 return fold (build (LE_EXPR, type, exp, high));
3169
3170 if (high == 0)
3171 return fold (build (GE_EXPR, type, exp, low));
3172
3173 if (operand_equal_p (low, high, 0))
3174 return fold (build (EQ_EXPR, type, exp, low));
3175
3176 if (integer_zerop (low))
3177 {
3178 if (! TREE_UNSIGNED (etype))
3179 {
3180 etype = (*lang_hooks.types.unsigned_type) (etype);
3181 high = convert (etype, high);
3182 exp = convert (etype, exp);
3183 }
3184 return build_range_check (type, exp, 1, 0, high);
3185 }
3186
3187 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3188 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3189 {
3190 unsigned HOST_WIDE_INT lo;
3191 HOST_WIDE_INT hi;
3192 int prec;
3193
3194 prec = TYPE_PRECISION (etype);
3195 if (prec <= HOST_BITS_PER_WIDE_INT)
3196 {
3197 hi = 0;
3198 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3199 }
3200 else
3201 {
3202 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3203 lo = (unsigned HOST_WIDE_INT) -1;
3204 }
3205
3206 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3207 {
3208 if (TREE_UNSIGNED (etype))
3209 {
3210 etype = (*lang_hooks.types.signed_type) (etype);
3211 exp = convert (etype, exp);
3212 }
3213 return fold (build (GT_EXPR, type, exp,
3214 convert (etype, integer_zero_node)));
3215 }
3216 }
3217
3218 if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3219 && ! TREE_OVERFLOW (value))
3220 return build_range_check (type,
3221 fold (build (MINUS_EXPR, etype, exp, low)),
3222 1, convert (etype, integer_zero_node), value);
3223
3224 return 0;
3225 }
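/* For example (illustrative): a test for X in [3, 10] matches none of
   the special cases above, so the final clause rewrites it as X - 3 in
   [0, 7]; the recursive call then takes the integer_zerop branch and,
   for signed X, performs the test as (unsigned) (X - 3) <= 7. */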
3226 \f
3227 /* Given two ranges, see if we can merge them into one. Return 1 if we
3228 can, 0 if we can't. Set the output range into the specified parameters. */
3229
3230 static int
3231 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0, tree high0,
3232 int in1_p, tree low1, tree high1)
3233 {
3234 int no_overlap;
3235 int subset;
3236 int temp;
3237 tree tem;
3238 int in_p;
3239 tree low, high;
3240 int lowequal = ((low0 == 0 && low1 == 0)
3241 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3242 low0, 0, low1, 0)));
3243 int highequal = ((high0 == 0 && high1 == 0)
3244 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3245 high0, 1, high1, 1)));
3246
3247 /* Make range 0 be the range that starts first, or ends last if they
3248 start at the same value. Swap them if it isn't. */
3249 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3250 low0, 0, low1, 0))
3251 || (lowequal
3252 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3253 high1, 1, high0, 1))))
3254 {
3255 temp = in0_p, in0_p = in1_p, in1_p = temp;
3256 tem = low0, low0 = low1, low1 = tem;
3257 tem = high0, high0 = high1, high1 = tem;
3258 }
3259
3260 /* Now flag two cases, whether the ranges are disjoint or whether the
3261 second range is totally subsumed in the first. Note that the tests
3262 below are simplified by the ones above. */
3263 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3264 high0, 1, low1, 0));
3265 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3266 high1, 1, high0, 1));
3267
3268 /* We now have four cases, depending on whether we are including or
3269 excluding the two ranges. */
3270 if (in0_p && in1_p)
3271 {
3272 /* If they don't overlap, the result is false. If the second range
3273 is a subset it is the result. Otherwise, the range is from the start
3274 of the second to the end of the first. */
3275 if (no_overlap)
3276 in_p = 0, low = high = 0;
3277 else if (subset)
3278 in_p = 1, low = low1, high = high1;
3279 else
3280 in_p = 1, low = low1, high = high0;
3281 }
3282
3283 else if (in0_p && ! in1_p)
3284 {
3285 /* If they don't overlap, the result is the first range. If they are
3286 equal, the result is false. If the second range is a subset of the
3287 first, and the ranges begin at the same place, we go from just after
3288 the end of the first range to the end of the second. If the second
3289 range is not a subset of the first, or if it is a subset and both
3290 ranges end at the same place, the range starts at the start of the
3291 first range and ends just before the second range.
3292 Otherwise, we can't describe this as a single range. */
3293 if (no_overlap)
3294 in_p = 1, low = low0, high = high0;
3295 else if (lowequal && highequal)
3296 in_p = 0, low = high = 0;
3297 else if (subset && lowequal)
3298 {
3299 in_p = 1, high = high0;
3300 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3301 integer_one_node, 0);
3302 }
3303 else if (! subset || highequal)
3304 {
3305 in_p = 1, low = low0;
3306 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3307 integer_one_node, 0);
3308 }
3309 else
3310 return 0;
3311 }
3312
3313 else if (! in0_p && in1_p)
3314 {
3315 /* If they don't overlap, the result is the second range. If the second
3316 is a subset of the first, the result is false. Otherwise,
3317 the range starts just after the first range and ends at the
3318 end of the second. */
3319 if (no_overlap)
3320 in_p = 1, low = low1, high = high1;
3321 else if (subset || highequal)
3322 in_p = 0, low = high = 0;
3323 else
3324 {
3325 in_p = 1, high = high1;
3326 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3327 integer_one_node, 0);
3328 }
3329 }
3330
3331 else
3332 {
3333 /* The case where we are excluding both ranges. Here the complex case
3334 is if they don't overlap. In that case, the only time we have a
3335 range is if they are adjacent. If the second is a subset of the
3336 first, the result is the first. Otherwise, the range to exclude
3337 starts at the beginning of the first range and ends at the end of the
3338 second. */
3339 if (no_overlap)
3340 {
3341 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3342 range_binop (PLUS_EXPR, NULL_TREE,
3343 high0, 1,
3344 integer_one_node, 1),
3345 1, low1, 0)))
3346 in_p = 0, low = low0, high = high1;
3347 else
3348 return 0;
3349 }
3350 else if (subset)
3351 in_p = 0, low = low0, high = high0;
3352 else
3353 in_p = 0, low = low0, high = high1;
3354 }
3355
3356 *pin_p = in_p, *plow = low, *phigh = high;
3357 return 1;
3358 }
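/* Illustrative cases: merging "+ [2, 5]" with "+ [4, 9]" gives
   "+ [4, 5]" (start of the second range to end of the first);
   "+ [2, 5]" with "+ [8, 9]" does not overlap and yields false; and
   excluding both of the adjacent ranges "- [2, 5]" and "- [6, 9]"
   combines into the single exclusion "- [2, 9]". */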
3359 \f
3360 #ifndef RANGE_TEST_NON_SHORT_CIRCUIT
3361 #define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
3362 #endif
3363
3364 /* EXP is some logical combination of boolean tests. See if we can
3365 merge it into some range test. Return the new tree if so. */
3366
3367 static tree
3368 fold_range_test (tree exp)
3369 {
3370 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3371 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3372 int in0_p, in1_p, in_p;
3373 tree low0, low1, low, high0, high1, high;
3374 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3375 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3376 tree tem;
3377
3378 /* If this is an OR operation, invert both sides; we will invert
3379 again at the end. */
3380 if (or_op)
3381 in0_p = ! in0_p, in1_p = ! in1_p;
3382
3383 /* If both expressions are the same, if we can merge the ranges, and we
3384 can build the range test, return it or it inverted. If one of the
3385 ranges is always true or always false, consider it to be the same
3386 expression as the other. */
3387 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3388 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3389 in1_p, low1, high1)
3390 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3391 lhs != 0 ? lhs
3392 : rhs != 0 ? rhs : integer_zero_node,
3393 in_p, low, high))))
3394 return or_op ? invert_truthvalue (tem) : tem;
3395
3396 /* On machines where the branch cost is expensive, if this is a
3397 short-circuited branch and the underlying object on both sides
3398 is the same, make a non-short-circuit operation. */
3399 else if (RANGE_TEST_NON_SHORT_CIRCUIT
3400 && lhs != 0 && rhs != 0
3401 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3402 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3403 && operand_equal_p (lhs, rhs, 0))
3404 {
3405 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3406 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
3407 which cases we can't do this. */
3408 if (simple_operand_p (lhs))
3409 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3410 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3411 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3412 TREE_OPERAND (exp, 1));
3413
3414 else if ((*lang_hooks.decls.global_bindings_p) () == 0
3415 && ! CONTAINS_PLACEHOLDER_P (lhs))
3416 {
3417 tree common = save_expr (lhs);
3418
3419 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3420 or_op ? ! in0_p : in0_p,
3421 low0, high0))
3422 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3423 or_op ? ! in1_p : in1_p,
3424 low1, high1))))
3425 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3426 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3427 TREE_TYPE (exp), lhs, rhs);
3428 }
3429 }
3430
3431 return 0;
3432 }
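/* For example (illustrative): in x >= 2 && x <= 5, both operands give
   ranges over the same x -- "+ [2, -]" and "+ [-, 5]" -- which
   merge_ranges combines into "+ [2, 5]" and build_range_check turns
   into (unsigned) (x - 2) <= 3. For x < 2 || x > 5, the OR path
   inverts both ranges first, and the final invert_truthvalue restores
   the sense of the test. */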
3433 \f
3434 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3435 bit value. Arrange things so the extra bits will be set to zero if and
3436 only if C is sign-extended to its full width. If MASK is nonzero,
3437 it is an INTEGER_CST that should be AND'ed with the extra bits. */
3438
3439 static tree
3440 unextend (tree c, int p, int unsignedp, tree mask)
3441 {
3442 tree type = TREE_TYPE (c);
3443 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3444 tree temp;
3445
3446 if (p == modesize || unsignedp)
3447 return c;
3448
3449 /* We work by getting just the sign bit into the low-order bit, then
3450 into the high-order bit, then sign-extend. We then XOR that value
3451 with C. */
3452 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3453 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3454
3455 /* We must use a signed type in order to get an arithmetic right shift.
3456 However, we must also avoid introducing accidental overflows, so that
3457 a subsequent call to integer_zerop will work. Hence we must
3458 do the type conversion here. At this point, the constant is either
3459 zero or one, and the conversion to a signed type can never overflow.
3460 We could get an overflow if this conversion is done anywhere else. */
3461 if (TREE_UNSIGNED (type))
3462 temp = convert ((*lang_hooks.types.signed_type) (type), temp);
3463
3464 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3465 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3466 if (mask != 0)
3467 temp = const_binop (BIT_AND_EXPR, temp, convert (TREE_TYPE (c), mask), 0);
3468 /* If necessary, convert the type back to match the type of C. */
3469 if (TREE_UNSIGNED (type))
3470 temp = convert (type, temp);
3471
3472 return convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
3473 }
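/* A worked instance (illustrative, 8-bit mode, P == 4): for
   C == 0b11111010, the 4-bit value 1010 already sign-extended, the
   field's sign bit is 1, so TEMP becomes 0b11110000 and C ^ TEMP is
   0b00001010 -- the extra bits come out zero. For the unextended
   C == 0b00001010, the same TEMP gives 0b11111010, with the extra bits
   set, which is exactly the property described above. */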
3474 \f
3475 /* Find ways of folding logical expressions of LHS and RHS:
3476 Try to merge two comparisons to the same innermost item.
3477 Look for range tests like "ch >= '0' && ch <= '9'".
3478 Look for combinations of simple terms on machines with expensive branches
3479 and evaluate the RHS unconditionally.
3480
3481 For example, if we have p->a == 2 && p->b == 4 and we can make an
3482 object large enough to span both A and B, we can do this with a comparison
3483 against the object ANDed with a mask.
3484
3485 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3486 operations to do this with one comparison.
3487
3488 We check for both normal comparisons and the BIT_AND_EXPRs made by this
3489 function and the one above.
3490
3491 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3492 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3493
3494 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
3495 two operands.
3496
3497 We return the simplified tree or 0 if no optimization is possible. */
3498
3499 static tree
3500 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
3501 {
3502 /* If this is the "or" of two comparisons, we can do something if
3503 the comparisons are NE_EXPR. If this is the "and", we can do something
3504 if the comparisons are EQ_EXPR. I.e.,
3505 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3506
3507 WANTED_CODE is this operation code. For single bit fields, we can
3508 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3509 comparison for one-bit fields. */
3510
3511 enum tree_code wanted_code;
3512 enum tree_code lcode, rcode;
3513 tree ll_arg, lr_arg, rl_arg, rr_arg;
3514 tree ll_inner, lr_inner, rl_inner, rr_inner;
3515 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3516 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3517 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3518 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3519 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3520 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3521 enum machine_mode lnmode, rnmode;
3522 tree ll_mask, lr_mask, rl_mask, rr_mask;
3523 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3524 tree l_const, r_const;
3525 tree lntype, rntype, result;
3526 int first_bit, end_bit;
3527 int volatilep;
3528
3529 /* Start by getting the comparison codes. Fail if anything is volatile.
3530 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3531 it were surrounded with a NE_EXPR. */
3532
3533 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3534 return 0;
3535
3536 lcode = TREE_CODE (lhs);
3537 rcode = TREE_CODE (rhs);
3538
3539 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3540 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3541
3542 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3543 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3544
3545 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3546 return 0;
3547
3548 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3549 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3550
3551 ll_arg = TREE_OPERAND (lhs, 0);
3552 lr_arg = TREE_OPERAND (lhs, 1);
3553 rl_arg = TREE_OPERAND (rhs, 0);
3554 rr_arg = TREE_OPERAND (rhs, 1);
3555
3556 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
3557 if (simple_operand_p (ll_arg)
3558 && simple_operand_p (lr_arg)
3559 && !FLOAT_TYPE_P (TREE_TYPE (ll_arg)))
3560 {
3561 int compcode;
3562
3563 if (operand_equal_p (ll_arg, rl_arg, 0)
3564 && operand_equal_p (lr_arg, rr_arg, 0))
3565 {
3566 int lcompcode, rcompcode;
3567
3568 lcompcode = comparison_to_compcode (lcode);
3569 rcompcode = comparison_to_compcode (rcode);
3570 compcode = (code == TRUTH_AND_EXPR)
3571 ? lcompcode & rcompcode
3572 : lcompcode | rcompcode;
3573 }
3574 else if (operand_equal_p (ll_arg, rr_arg, 0)
3575 && operand_equal_p (lr_arg, rl_arg, 0))
3576 {
3577 int lcompcode, rcompcode;
3578
3579 rcode = swap_tree_comparison (rcode);
3580 lcompcode = comparison_to_compcode (lcode);
3581 rcompcode = comparison_to_compcode (rcode);
3582 compcode = (code == TRUTH_AND_EXPR)
3583 ? lcompcode & rcompcode
3584 : lcompcode | rcompcode;
3585 }
3586 else
3587 compcode = -1;
3588
3589 if (compcode == COMPCODE_TRUE)
3590 return convert (truth_type, integer_one_node);
3591 else if (compcode == COMPCODE_FALSE)
3592 return convert (truth_type, integer_zero_node);
3593 else if (compcode != -1)
3594 return build (compcode_to_comparison (compcode),
3595 truth_type, ll_arg, lr_arg);
3596 }
3597
3598 /* If the RHS can be evaluated unconditionally and its operands are
3599 simple, it wins to evaluate the RHS unconditionally on machines
3600 with expensive branches. In this case, this isn't a comparison
3601 that can be merged. Avoid doing this if the RHS is a floating-point
3602 comparison since those can trap. */
3603
3604 if (BRANCH_COST >= 2
3605 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
3606 && simple_operand_p (rl_arg)
3607 && simple_operand_p (rr_arg))
3608 {
3609 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
3610 if (code == TRUTH_OR_EXPR
3611 && lcode == NE_EXPR && integer_zerop (lr_arg)
3612 && rcode == NE_EXPR && integer_zerop (rr_arg)
3613 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3614 return build (NE_EXPR, truth_type,
3615 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3616 ll_arg, rl_arg),
3617 integer_zero_node);
3618
3619 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
3620 if (code == TRUTH_AND_EXPR
3621 && lcode == EQ_EXPR && integer_zerop (lr_arg)
3622 && rcode == EQ_EXPR && integer_zerop (rr_arg)
3623 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3624 return build (EQ_EXPR, truth_type,
3625 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3626 ll_arg, rl_arg),
3627 integer_zero_node);
3628
3629 return build (code, truth_type, lhs, rhs);
3630 }
3631
3632 /* See if the comparisons can be merged. Then get all the parameters for
3633 each side. */
3634
3635 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
3636 || (rcode != EQ_EXPR && rcode != NE_EXPR))
3637 return 0;
3638
3639 volatilep = 0;
3640 ll_inner = decode_field_reference (ll_arg,
3641 &ll_bitsize, &ll_bitpos, &ll_mode,
3642 &ll_unsignedp, &volatilep, &ll_mask,
3643 &ll_and_mask);
3644 lr_inner = decode_field_reference (lr_arg,
3645 &lr_bitsize, &lr_bitpos, &lr_mode,
3646 &lr_unsignedp, &volatilep, &lr_mask,
3647 &lr_and_mask);
3648 rl_inner = decode_field_reference (rl_arg,
3649 &rl_bitsize, &rl_bitpos, &rl_mode,
3650 &rl_unsignedp, &volatilep, &rl_mask,
3651 &rl_and_mask);
3652 rr_inner = decode_field_reference (rr_arg,
3653 &rr_bitsize, &rr_bitpos, &rr_mode,
3654 &rr_unsignedp, &volatilep, &rr_mask,
3655 &rr_and_mask);
3656
3657 /* The inner operation on the lhs of each comparison must be the
3658 same if we are to be able to do anything.
3659 Then see if we have constants. If not, the same must be true for
3660 the rhs's. */
3661 if (volatilep || ll_inner == 0 || rl_inner == 0
3662 || ! operand_equal_p (ll_inner, rl_inner, 0))
3663 return 0;
3664
3665 if (TREE_CODE (lr_arg) == INTEGER_CST
3666 && TREE_CODE (rr_arg) == INTEGER_CST)
3667 l_const = lr_arg, r_const = rr_arg;
3668 else if (lr_inner == 0 || rr_inner == 0
3669 || ! operand_equal_p (lr_inner, rr_inner, 0))
3670 return 0;
3671 else
3672 l_const = r_const = 0;
3673
3674 /* If either comparison code is not correct for our logical operation,
3675 fail. However, we can convert a one-bit comparison against zero into
3676 the opposite comparison against that bit being set in the field. */
3677
3678 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
3679 if (lcode != wanted_code)
3680 {
3681 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
3682 {
3683 /* Make the left operand unsigned, since we are only interested
3684 in the value of one bit. Otherwise we are doing the wrong
3685 thing below. */
3686 ll_unsignedp = 1;
3687 l_const = ll_mask;
3688 }
3689 else
3690 return 0;
3691 }
3692
3693 /* This is analogous to the code for l_const above. */
3694 if (rcode != wanted_code)
3695 {
3696 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
3697 {
3698 rl_unsignedp = 1;
3699 r_const = rl_mask;
3700 }
3701 else
3702 return 0;
3703 }
3704
3705 /* After this point all optimizations will generate bit-field
3706 references, which we might not want. */
3707 if (! (*lang_hooks.can_use_bit_fields_p) ())
3708 return 0;
3709
3710 /* See if we can find a mode that contains both fields being compared on
3711 the left. If we can't, fail. Otherwise, update all constants and masks
3712 to be relative to a field of that size. */
3713 first_bit = MIN (ll_bitpos, rl_bitpos);
3714 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
3715 lnmode = get_best_mode (end_bit - first_bit, first_bit,
3716 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
3717 volatilep);
3718 if (lnmode == VOIDmode)
3719 return 0;
3720
3721 lnbitsize = GET_MODE_BITSIZE (lnmode);
3722 lnbitpos = first_bit & ~ (lnbitsize - 1);
3723 lntype = (*lang_hooks.types.type_for_size) (lnbitsize, 1);
3724 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
3725
3726 if (BYTES_BIG_ENDIAN)
3727 {
3728 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
3729 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
3730 }
3731
3732 ll_mask = const_binop (LSHIFT_EXPR, convert (lntype, ll_mask),
3733 size_int (xll_bitpos), 0);
3734 rl_mask = const_binop (LSHIFT_EXPR, convert (lntype, rl_mask),
3735 size_int (xrl_bitpos), 0);
3736
3737 if (l_const)
3738 {
3739 l_const = convert (lntype, l_const);
3740 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
3741 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
3742 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
3743 fold (build1 (BIT_NOT_EXPR,
3744 lntype, ll_mask)),
3745 0)))
3746 {
3747 warning ("comparison is always %d", wanted_code == NE_EXPR);
3748
3749 return convert (truth_type,
3750 wanted_code == NE_EXPR
3751 ? integer_one_node : integer_zero_node);
3752 }
3753 }
3754 if (r_const)
3755 {
3756 r_const = convert (lntype, r_const);
3757 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
3758 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
3759 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
3760 fold (build1 (BIT_NOT_EXPR,
3761 lntype, rl_mask)),
3762 0)))
3763 {
3764 warning ("comparison is always %d", wanted_code == NE_EXPR);
3765
3766 return convert (truth_type,
3767 wanted_code == NE_EXPR
3768 ? integer_one_node : integer_zero_node);
3769 }
3770 }
3771
3772 /* If the right sides are not constant, do the same for them. Also,
3773 disallow this optimization if a size or signedness mismatch occurs
3774 between the left and right sides. */
3775 if (l_const == 0)
3776 {
3777 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
3778 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
3779 /* Make sure the two fields on the right
3780 correspond to the left without being swapped. */
3781 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
3782 return 0;
3783
3784 first_bit = MIN (lr_bitpos, rr_bitpos);
3785 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
3786 rnmode = get_best_mode (end_bit - first_bit, first_bit,
3787 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
3788 volatilep);
3789 if (rnmode == VOIDmode)
3790 return 0;
3791
3792 rnbitsize = GET_MODE_BITSIZE (rnmode);
3793 rnbitpos = first_bit & ~ (rnbitsize - 1);
3794 rntype = (*lang_hooks.types.type_for_size) (rnbitsize, 1);
3795 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
3796
3797 if (BYTES_BIG_ENDIAN)
3798 {
3799 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
3800 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
3801 }
3802
3803 lr_mask = const_binop (LSHIFT_EXPR, convert (rntype, lr_mask),
3804 size_int (xlr_bitpos), 0);
3805 rr_mask = const_binop (LSHIFT_EXPR, convert (rntype, rr_mask),
3806 size_int (xrr_bitpos), 0);
3807
3808 /* Make a mask that corresponds to both fields being compared.
3809 Do this for both items being compared. If the operands are the
3810 same size and the bits being compared are in the same position
3811 then we can do this by masking both and comparing the masked
3812 results. */
3813 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3814 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
3815 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
3816 {
3817 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
3818 ll_unsignedp || rl_unsignedp);
3819 if (! all_ones_mask_p (ll_mask, lnbitsize))
3820 lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
3821
3822 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
3823 lr_unsignedp || rr_unsignedp);
3824 if (! all_ones_mask_p (lr_mask, rnbitsize))
3825 rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
3826
3827 return build (wanted_code, truth_type, lhs, rhs);
3828 }
3829
3830 /* There is still another way we can do something: If both pairs of
3831 fields being compared are adjacent, we may be able to make a wider
3832 field containing them both.
3833
3834 Note that we still must mask the lhs/rhs expressions. Furthermore,
3835 the mask must be shifted to account for the shift done by
3836 make_bit_field_ref. */
3837 if ((ll_bitsize + ll_bitpos == rl_bitpos
3838 && lr_bitsize + lr_bitpos == rr_bitpos)
3839 || (ll_bitpos == rl_bitpos + rl_bitsize
3840 && lr_bitpos == rr_bitpos + rr_bitsize))
3841 {
3842 tree type;
3843
3844 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
3845 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
3846 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
3847 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
3848
3849 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
3850 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
3851 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
3852 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
3853
3854 /* Convert to the smaller type before masking out unwanted bits. */
3855 type = lntype;
3856 if (lntype != rntype)
3857 {
3858 if (lnbitsize > rnbitsize)
3859 {
3860 lhs = convert (rntype, lhs);
3861 ll_mask = convert (rntype, ll_mask);
3862 type = rntype;
3863 }
3864 else if (lnbitsize < rnbitsize)
3865 {
3866 rhs = convert (lntype, rhs);
3867 lr_mask = convert (lntype, lr_mask);
3868 type = lntype;
3869 }
3870 }
3871
3872 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
3873 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
3874
3875 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
3876 rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
3877
3878 return build (wanted_code, truth_type, lhs, rhs);
3879 }
3880
3881 return 0;
3882 }
3883
3884 /* Handle the case of comparisons with constants. If there is something in
3885 common between the masks, those bits of the constants must be the same.
3886 If not, the condition is always false. Test for this to avoid generating
3887 incorrect code below. */
3888 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
3889 if (! integer_zerop (result)
3890 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
3891 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
3892 {
3893 if (wanted_code == NE_EXPR)
3894 {
3895 warning ("`or' of unmatched not-equal tests is always 1");
3896 return convert (truth_type, integer_one_node);
3897 }
3898 else
3899 {
3900 warning ("`and' of mutually exclusive equal-tests is always 0");
3901 return convert (truth_type, integer_zero_node);
3902 }
3903 }
3904
3905 /* Construct the expression we will return. First get the component
3906 reference we will make. Unless the mask is all ones the width of
3907 that field, perform the mask operation. Then compare with the
3908 merged constant. */
3909 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
3910 ll_unsignedp || rl_unsignedp);
3911
3912 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3913 if (! all_ones_mask_p (ll_mask, lnbitsize))
3914 result = build (BIT_AND_EXPR, lntype, result, ll_mask);
3915
3916 return build (wanted_code, truth_type, result,
3917 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
3918 }
3919 \f
3920 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
3921 constant. */
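/* For example, a sketch of the recursion below: MIN (x, 0) != -1 is
   first rewritten as the inversion of MIN (x, 0) == -1; the EQ_EXPR
   case folds that to x == -1, and inverting again yields x != -1.  */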
3922
3923 static tree
3924 optimize_minmax_comparison (tree t)
3925 {
3926 tree type = TREE_TYPE (t);
3927 tree arg0 = TREE_OPERAND (t, 0);
3928 enum tree_code op_code;
3929 tree comp_const = TREE_OPERAND (t, 1);
3930 tree minmax_const;
3931 int consts_equal, consts_lt;
3932 tree inner;
3933
3934 STRIP_SIGN_NOPS (arg0);
3935
3936 op_code = TREE_CODE (arg0);
3937 minmax_const = TREE_OPERAND (arg0, 1);
3938 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
3939 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
3940 inner = TREE_OPERAND (arg0, 0);
3941
3942 /* If something does not permit us to optimize, return the original tree. */
3943 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
3944 || TREE_CODE (comp_const) != INTEGER_CST
3945 || TREE_CONSTANT_OVERFLOW (comp_const)
3946 || TREE_CODE (minmax_const) != INTEGER_CST
3947 || TREE_CONSTANT_OVERFLOW (minmax_const))
3948 return t;
3949
3950 /* Now handle all the various comparison codes. We only handle EQ_EXPR
3951 and GT_EXPR, doing the rest with recursive calls using logical
3952 simplifications. */
3953 switch (TREE_CODE (t))
3954 {
3955 case NE_EXPR: case LT_EXPR: case LE_EXPR:
3956 return
3957 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
3958
3959 case GE_EXPR:
3960 return
3961 fold (build (TRUTH_ORIF_EXPR, type,
3962 optimize_minmax_comparison
3963 (build (EQ_EXPR, type, arg0, comp_const)),
3964 optimize_minmax_comparison
3965 (build (GT_EXPR, type, arg0, comp_const))));
3966
3967 case EQ_EXPR:
3968 if (op_code == MAX_EXPR && consts_equal)
3969 /* MAX (X, 0) == 0 -> X <= 0 */
3970 return fold (build (LE_EXPR, type, inner, comp_const));
3971
3972 else if (op_code == MAX_EXPR && consts_lt)
3973 /* MAX (X, 0) == 5 -> X == 5 */
3974 return fold (build (EQ_EXPR, type, inner, comp_const));
3975
3976 else if (op_code == MAX_EXPR)
3977 /* MAX (X, 0) == -1 -> false */
3978 return omit_one_operand (type, integer_zero_node, inner);
3979
3980 else if (consts_equal)
3981 /* MIN (X, 0) == 0 -> X >= 0 */
3982 return fold (build (GE_EXPR, type, inner, comp_const));
3983
3984 else if (consts_lt)
3985 /* MIN (X, 0) == 5 -> false */
3986 return omit_one_operand (type, integer_zero_node, inner);
3987
3988 else
3989 /* MIN (X, 0) == -1 -> X == -1 */
3990 return fold (build (EQ_EXPR, type, inner, comp_const));
3991
3992 case GT_EXPR:
3993 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
3994 /* MAX (X, 0) > 0 -> X > 0
3995 MAX (X, 0) > 5 -> X > 5 */
3996 return fold (build (GT_EXPR, type, inner, comp_const));
3997
3998 else if (op_code == MAX_EXPR)
3999 /* MAX (X, 0) > -1 -> true */
4000 return omit_one_operand (type, integer_one_node, inner);
4001
4002 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4003 /* MIN (X, 0) > 0 -> false
4004 MIN (X, 0) > 5 -> false */
4005 return omit_one_operand (type, integer_zero_node, inner);
4006
4007 else
4008 /* MIN (X, 0) > -1 -> X > -1 */
4009 return fold (build (GT_EXPR, type, inner, comp_const));
4010
4011 default:
4012 return t;
4013 }
4014 }
4015 \f
4016 /* T is an integer expression that is being multiplied or divided by, or
4017 taken modulo, a constant C (CODE says which operation and what kind of
4018 divide or modulus). See if we can eliminate that operation by folding it with
4019 other operations already in T. WIDE_TYPE, if non-null, is a type that
4020 should be used for the computation if wider than our type.
4021
4022 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4023 (X * 2) + (Y * 4). We must, however, be assured that either the original
4024 expression would not overflow or that overflow is undefined for the type
4025 in the language in question.
4026
4027 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4028 the machine has a multiply-accumulate insn or that this is part of an
4029 addressing calculation.
4030
4031 If we return a non-null expression, it is an equivalent form of the
4032 original computation, but need not be in the original type. */
4033
4034 static tree
4035 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
4036 {
4037 /* To avoid exponential search depth, refuse to allow recursion past
4038 three levels. Beyond that (1) it's highly unlikely that we'll find
4039 something interesting and (2) we've probably processed it before
4040 when we built the inner expression. */
4041
4042 static int depth;
4043 tree ret;
4044
4045 if (depth > 3)
4046 return NULL;
4047
4048 depth++;
4049 ret = extract_muldiv_1 (t, c, code, wide_type);
4050 depth--;
4051
4052 return ret;
4053 }
4054
4055 static tree
4056 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
4057 {
4058 tree type = TREE_TYPE (t);
4059 enum tree_code tcode = TREE_CODE (t);
4060 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
4061 > GET_MODE_SIZE (TYPE_MODE (type)))
4062 ? wide_type : type);
4063 tree t1, t2;
4064 int same_p = tcode == code;
4065 tree op0 = NULL_TREE, op1 = NULL_TREE;
4066
4067 /* Don't deal with constants of zero here; they confuse the code below. */
4068 if (integer_zerop (c))
4069 return NULL_TREE;
4070
4071 if (TREE_CODE_CLASS (tcode) == '1')
4072 op0 = TREE_OPERAND (t, 0);
4073
4074 if (TREE_CODE_CLASS (tcode) == '2')
4075 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
4076
4077 /* Note that we need not handle conditional operations here since fold
4078 already handles those cases. So just do arithmetic here. */
4079 switch (tcode)
4080 {
4081 case INTEGER_CST:
4082 /* For a constant, we can always simplify if we are a multiply
4083 or (for divide and modulus) if it is a multiple of our constant. */
4084 if (code == MULT_EXPR
4085 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
4086 return const_binop (code, convert (ctype, t), convert (ctype, c), 0);
4087 break;
4088
4089 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
4090 /* If op0 is an expression ... */
4091 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
4092 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
4093 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
4094 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
4095 /* ... and is unsigned, and its type is smaller than ctype,
4096 then we cannot pass through as widening. */
4097 && ((TREE_UNSIGNED (TREE_TYPE (op0))
4098 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4099 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
4100 && (GET_MODE_SIZE (TYPE_MODE (ctype))
4101 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
4102 /* ... or its type is larger than ctype,
4103 then we cannot pass through this truncation. */
4104 || (GET_MODE_SIZE (TYPE_MODE (ctype))
4105 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
4106 /* ... or signedness changes for division or modulus,
4107 then we cannot pass through this conversion. */
4108 || (code != MULT_EXPR
4109 && (TREE_UNSIGNED (ctype)
4110 != TREE_UNSIGNED (TREE_TYPE (op0))))))
4111 break;
4112
4113 /* Pass the constant down and see if we can make a simplification. If
4114 we can, replace this expression with the inner simplification for
4115 possible later conversion to our or some other type. */
4116 if ((t2 = convert (TREE_TYPE (op0), c)) != 0
4117 && TREE_CODE (t2) == INTEGER_CST
4118 && ! TREE_CONSTANT_OVERFLOW (t2)
4119 && (0 != (t1 = extract_muldiv (op0, t2, code,
4120 code == MULT_EXPR
4121 ? ctype : NULL_TREE))))
4122 return t1;
4123 break;
4124
4125 case NEGATE_EXPR: case ABS_EXPR:
4126 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4127 return fold (build1 (tcode, ctype, convert (ctype, t1)));
4128 break;
4129
4130 case MIN_EXPR: case MAX_EXPR:
4131 /* If widening the type changes the signedness, then we can't perform
4132 this optimization as that changes the result. */
4133 if (TREE_UNSIGNED (ctype) != TREE_UNSIGNED (type))
4134 break;
4135
4136 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
4137 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
4138 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
4139 {
4140 if (tree_int_cst_sgn (c) < 0)
4141 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
4142
4143 return fold (build (tcode, ctype, convert (ctype, t1),
4144 convert (ctype, t2)));
4145 }
4146 break;
4147
4148 case WITH_RECORD_EXPR:
4149 if ((t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code, wide_type)) != 0)
4150 return build (WITH_RECORD_EXPR, TREE_TYPE (t1), t1,
4151 TREE_OPERAND (t, 1));
4152 break;
4153
4154 case LSHIFT_EXPR: case RSHIFT_EXPR:
4155 /* If the second operand is constant, this is a multiplication
4156 or floor division by a power of two, so we can treat it that
4157 way unless the multiplier or divisor overflows. */
4158 if (TREE_CODE (op1) == INTEGER_CST
4159 /* const_binop may not detect overflow correctly,
4160 so check for it explicitly here. */
4161 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
4162 && TREE_INT_CST_HIGH (op1) == 0
4163 && 0 != (t1 = convert (ctype,
4164 const_binop (LSHIFT_EXPR, size_one_node,
4165 op1, 0)))
4166 && ! TREE_OVERFLOW (t1))
4167 return extract_muldiv (build (tcode == LSHIFT_EXPR
4168 ? MULT_EXPR : FLOOR_DIV_EXPR,
4169 ctype, convert (ctype, op0), t1),
4170 c, code, wide_type);
4171 break;
4172
4173 case PLUS_EXPR: case MINUS_EXPR:
4174 /* See if we can eliminate the operation on both sides. If we can, we
4175 can return a new PLUS or MINUS. If we can't, the only remaining
4176 cases where we can do anything are if the second operand is a
4177 constant. */
4178 t1 = extract_muldiv (op0, c, code, wide_type);
4179 t2 = extract_muldiv (op1, c, code, wide_type);
4180 if (t1 != 0 && t2 != 0
4181 && (code == MULT_EXPR
4182 /* If not multiplication, we can only do this if both operands
4183 are divisible by c. */
4184 || (multiple_of_p (ctype, op0, c)
4185 && multiple_of_p (ctype, op1, c))))
4186 return fold (build (tcode, ctype, convert (ctype, t1),
4187 convert (ctype, t2)));
4188
4189 /* If this was a subtraction, negate OP1 and set it to be an addition.
4190 This simplifies the logic below. */
4191 if (tcode == MINUS_EXPR)
4192 tcode = PLUS_EXPR, op1 = negate_expr (op1);
4193
4194 if (TREE_CODE (op1) != INTEGER_CST)
4195 break;
4196
4197 /* If either OP1 or C is negative, this optimization is not safe for
4198 some of the division and remainder types, while for others we need
4199 to change the code. */
4200 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
4201 {
4202 if (code == CEIL_DIV_EXPR)
4203 code = FLOOR_DIV_EXPR;
4204 else if (code == FLOOR_DIV_EXPR)
4205 code = CEIL_DIV_EXPR;
4206 else if (code != MULT_EXPR
4207 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
4208 break;
4209 }
4210
4211 /* If it's a multiply or a division/modulus operation of a multiple
4212 of our constant, do the operation and verify it doesn't overflow. */
4213 if (code == MULT_EXPR
4214 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4215 {
4216 op1 = const_binop (code, convert (ctype, op1), convert (ctype, c), 0);
4217 if (op1 == 0 || TREE_OVERFLOW (op1))
4218 break;
4219 }
4220 else
4221 break;
4222
4223 /* If we have an unsigned type that is not a sizetype, we cannot widen
4224 the operation since it will change the result if the original
4225 computation overflowed. */
4226 if (TREE_UNSIGNED (ctype)
4227 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
4228 && ctype != type)
4229 break;
4230
4231 /* If we were able to eliminate our operation from the first side,
4232 apply our operation to the second side and reform the PLUS. */
4233 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4234 return fold (build (tcode, ctype, convert (ctype, t1), op1));
4235
4236 /* The last case is if we are a multiply. In that case, we can
4237 apply the distributive law to commute the multiply and addition
4238 if the multiplication of the constants doesn't overflow. */
4239 if (code == MULT_EXPR)
4240 return fold (build (tcode, ctype, fold (build (code, ctype,
4241 convert (ctype, op0),
4242 convert (ctype, c))),
4243 op1));
4244
4245 break;
4246
4247 case MULT_EXPR:
4248 /* We have a special case here if we are doing something like
4249 (C * 8) % 4 since we know that's zero. */
4250 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4251 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4252 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4253 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4254 return omit_one_operand (type, integer_zero_node, op0);
4255
4256 /* ... fall through ... */
4257
4258 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
4259 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
4260 /* If we can extract our operation from the LHS, do so and return a
4261 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
4262 do something only if the second operand is a constant. */
4263 if (same_p
4264 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4265 return fold (build (tcode, ctype, convert (ctype, t1),
4266 convert (ctype, op1)));
4267 else if (tcode == MULT_EXPR && code == MULT_EXPR
4268 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4269 return fold (build (tcode, ctype, convert (ctype, op0),
4270 convert (ctype, t1)));
4271 else if (TREE_CODE (op1) != INTEGER_CST)
4272 return 0;
4273
4274 /* If these are the same operation types, we can associate them
4275 assuming no overflow. */
4276 if (tcode == code
4277 && 0 != (t1 = const_binop (MULT_EXPR, convert (ctype, op1),
4278 convert (ctype, c), 0))
4279 && ! TREE_OVERFLOW (t1))
4280 return fold (build (tcode, ctype, convert (ctype, op0), t1));
4281
4282 /* If these operations "cancel" each other, we have the main
4283 optimizations of this pass, which occur when either constant is a
4284 multiple of the other, in which case we replace this with either an
4285 operation of CODE or TCODE.
4286
4287 If we have an unsigned type that is not a sizetype, we cannot do
4288 this since it will change the result if the original computation
4289 overflowed. */
4290 if ((! TREE_UNSIGNED (ctype)
4291 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
4292 && ! flag_wrapv
4293 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4294 || (tcode == MULT_EXPR
4295 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4296 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
4297 {
4298 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4299 return fold (build (tcode, ctype, convert (ctype, op0),
4300 convert (ctype,
4301 const_binop (TRUNC_DIV_EXPR,
4302 op1, c, 0))));
4303 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4304 return fold (build (code, ctype, convert (ctype, op0),
4305 convert (ctype,
4306 const_binop (TRUNC_DIV_EXPR,
4307 c, op1, 0))));
4308 }
4309 break;
4310
4311 default:
4312 break;
4313 }
4314
4315 return 0;
4316 }
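/* Two illustrative cancellations from the MULT/DIV cases above, for a
   signed X where overflow is undefined (the !flag_wrapv guard):
   folding (X * 12) / 4 with C == 4 gives X * 3, since 12 % 4 == 0,
   while folding (X * 4) / 12 with C == 12 gives X / 3 through the
   second TRUNC_DIV_EXPR branch, again because 12 % 4 == 0.  */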
4317 \f
4318 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4319 S, a SAVE_EXPR, return the expression actually being evaluated. Note
4320 that we may sometimes modify the tree. */
4321
4322 static tree
4323 strip_compound_expr (tree t, tree s)
4324 {
4325 enum tree_code code = TREE_CODE (t);
4326
4327 /* See if this is the COMPOUND_EXPR we want to eliminate. */
4328 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4329 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4330 return TREE_OPERAND (t, 1);
4331
4332 /* See if this is a COND_EXPR or a simple arithmetic operator. We
4333 don't bother handling any other types. */
4334 else if (code == COND_EXPR)
4335 {
4336 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4337 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4338 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
4339 }
4340 else if (TREE_CODE_CLASS (code) == '1')
4341 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4342 else if (TREE_CODE_CLASS (code) == '<'
4343 || TREE_CODE_CLASS (code) == '2')
4344 {
4345 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4346 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4347 }
4348
4349 return t;
4350 }
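/* For example (illustrative): if S had been forced into "(void) S, B"
   so that S would be evaluated for its side effects, the routine above
   hands back just B once the surrounding expression has been folded.  */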
4351 \f
4352 /* Return a node which has the indicated constant VALUE (either 0 or
4353 1), and is of the indicated TYPE. */
4354
4355 static tree
4356 constant_boolean_node (int value, tree type)
4357 {
4358 if (type == integer_type_node)
4359 return value ? integer_one_node : integer_zero_node;
4360 else if (TREE_CODE (type) == BOOLEAN_TYPE)
4361 return (*lang_hooks.truthvalue_conversion) (value ? integer_one_node :
4362 integer_zero_node);
4363 else
4364 {
4365 tree t = build_int_2 (value, 0);
4366
4367 TREE_TYPE (t) = type;
4368 return t;
4369 }
4370 }
4371
4372 /* Utility function for the following routine, to see how complex a nesting of
4373 COND_EXPRs can be. EXPR is the expression and LIM is a count beyond which
4374 we don't care (to avoid spending too much time on complex expressions). */
4375
4376 static int
4377 count_cond (tree expr, int lim)
4378 {
4379 int ctrue, cfalse;
4380
4381 if (TREE_CODE (expr) != COND_EXPR)
4382 return 0;
4383 else if (lim <= 0)
4384 return 0;
4385
4386 ctrue = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4387 cfalse = count_cond (TREE_OPERAND (expr, 2), lim - 1 - ctrue);
4388 return MIN (lim, 1 + ctrue + cfalse);
4389 }
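/* For example, count_cond (a ? (b ? x : y) : z, 25) returns 2: one for
   the outer COND_EXPR plus one for the COND_EXPR in its true arm.  */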
4390
4391 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
4392 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
4393 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
4394 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
4395 COND is the first argument to CODE; otherwise (as in the example
4396 given here), it is the second argument. TYPE is the type of the
4397 original expression. */
4398
4399 static tree
4400 fold_binary_op_with_conditional_arg (enum tree_code code, tree type, tree cond, tree arg, int cond_first_p)
4401 {
4402 tree test, true_value, false_value;
4403 tree lhs = NULL_TREE;
4404 tree rhs = NULL_TREE;
4405 /* In the end, we'll produce a COND_EXPR. Both arms of the
4406 conditional expression will be binary operations. The left-hand
4407 side of the expression to be executed if the condition is true
4408 will be pointed to by TRUE_LHS. Similarly, the right-hand side
4409 of the expression to be executed if the condition is true will be
4410 pointed to by TRUE_RHS. FALSE_LHS and FALSE_RHS are analogous --
4411 but apply to the expression to be executed if the conditional is
4412 false. */
4413 tree *true_lhs;
4414 tree *true_rhs;
4415 tree *false_lhs;
4416 tree *false_rhs;
4417 /* These are the codes to use for the left-hand side and right-hand
4418 side of the COND_EXPR. Normally, they are the same as CODE. */
4419 enum tree_code lhs_code = code;
4420 enum tree_code rhs_code = code;
4421 /* And these are the types of the expressions. */
4422 tree lhs_type = type;
4423 tree rhs_type = type;
4424 int save = 0;
4425
4426 if (cond_first_p)
4427 {
4428 true_rhs = false_rhs = &arg;
4429 true_lhs = &true_value;
4430 false_lhs = &false_value;
4431 }
4432 else
4433 {
4434 true_lhs = false_lhs = &arg;
4435 true_rhs = &true_value;
4436 false_rhs = &false_value;
4437 }
4438
4439 if (TREE_CODE (cond) == COND_EXPR)
4440 {
4441 test = TREE_OPERAND (cond, 0);
4442 true_value = TREE_OPERAND (cond, 1);
4443 false_value = TREE_OPERAND (cond, 2);
4444 /* If this operand throws an exception, then it does not make
4445 sense to try to perform a logical or arithmetic operation
4446 involving it. Instead of building `a + throw 3' for example,
4447 we simply build `a, throw 3'. */
4448 if (VOID_TYPE_P (TREE_TYPE (true_value)))
4449 {
4450 if (! cond_first_p)
4451 {
4452 lhs_code = COMPOUND_EXPR;
4453 lhs_type = void_type_node;
4454 }
4455 else
4456 lhs = true_value;
4457 }
4458 if (VOID_TYPE_P (TREE_TYPE (false_value)))
4459 {
4460 if (! cond_first_p)
4461 {
4462 rhs_code = COMPOUND_EXPR;
4463 rhs_type = void_type_node;
4464 }
4465 else
4466 rhs = false_value;
4467 }
4468 }
4469 else
4470 {
4471 tree testtype = TREE_TYPE (cond);
4472 test = cond;
4473 true_value = convert (testtype, integer_one_node);
4474 false_value = convert (testtype, integer_zero_node);
4475 }
4476
4477 /* If ARG is complex we want to make sure we only evaluate it once. Though
4478 this is only required if it is volatile, it might be more efficient even
4479 if it is not. However, if we succeed in folding one part to a constant,
4480 we do not need to make this SAVE_EXPR. Since we do this optimization
4481 primarily to see if we do end up with a constant and this SAVE_EXPR
4482 interferes with later optimizations, suppressing it when we can is
4483 important.
4484
4485 If we are not in a function, we can't make a SAVE_EXPR, so don't try to
4486 do so. Don't try to see if the result is a constant if an arm is a
4487 COND_EXPR since we get exponential behavior in that case. */
4488
4489 if (saved_expr_p (arg))
4490 save = 1;
4491 else if (lhs == 0 && rhs == 0
4492 && !TREE_CONSTANT (arg)
4493 && (*lang_hooks.decls.global_bindings_p) () == 0
4494 && ((TREE_CODE (arg) != VAR_DECL && TREE_CODE (arg) != PARM_DECL)
4495 || TREE_SIDE_EFFECTS (arg)))
4496 {
4497 if (TREE_CODE (true_value) != COND_EXPR)
4498 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4499
4500 if (TREE_CODE (false_value) != COND_EXPR)
4501 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4502
4503 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4504 && (rhs == 0 || !TREE_CONSTANT (rhs)))
4505 {
4506 arg = save_expr (arg);
4507 lhs = rhs = 0;
4508 save = 1;
4509 }
4510 }
4511
4512 if (lhs == 0)
4513 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4514 if (rhs == 0)
4515 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4516
4517 test = fold (build (COND_EXPR, type, test, lhs, rhs));
4518
4519 if (save)
4520 return build (COMPOUND_EXPR, type,
4521 convert (void_type_node, arg),
4522 strip_compound_expr (test, arg));
4523 else
4524 return convert (type, test);
4525 }
4526
4527 \f
4528 /* Subroutine of fold() that checks for the addition of +/- 0.0.
4529
4530 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
4531 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
4532 ADDEND is the same as X.
4533
4534 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
4535 and finite. The problematic cases are when X is zero, and its mode
4536 has signed zeros. In the case of rounding towards -infinity,
4537 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
4538 modes, X + 0 is not the same as X because -0 + 0 is 0. */
4539
4540 static bool
4541 fold_real_zero_addition_p (tree type, tree addend, int negate)
4542 {
4543 if (!real_zerop (addend))
4544 return false;
4545
4546 /* Don't allow the fold with -fsignaling-nans. */
4547 if (HONOR_SNANS (TYPE_MODE (type)))
4548 return false;
4549
4550 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
4551 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
4552 return true;
4553
4554 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
4555 if (TREE_CODE (addend) == REAL_CST
4556 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
4557 negate = !negate;
4558
4559 /* The mode has signed zeros, and we have to honor their sign.
4560 In this situation, there is only one case we can return true for.
4561 X - 0 is the same as X unless rounding towards -infinity is
4562 supported. */
4563 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
4564 }
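/* Concretely (a sketch; the exact folds depend on the active
   floating-point options): under the default assumptions "x - 0.0"
   folds to "x", because only rounding towards -infinity could
   distinguish them, while "x + 0.0" must not fold when signed zeros
   are honored, since (-0.0) + 0.0 is +0.0, not -0.0.  */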
4565
4566 /* Subroutine of fold() that checks comparisons of built-in math
4567 functions against real constants.
4568
4569 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
4570 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
4571 is the type of the result and ARG0 and ARG1 are the operands of the
4572 comparison. ARG1 must be a TREE_REAL_CST.
4573
4574 The function returns the constant folded tree if a simplification
4575 can be made, and NULL_TREE otherwise. */
4576
4577 static tree
4578 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code, tree type, tree arg0, tree arg1)
4579 {
4580 REAL_VALUE_TYPE c;
4581
4582 if (fcode == BUILT_IN_SQRT
4583 || fcode == BUILT_IN_SQRTF
4584 || fcode == BUILT_IN_SQRTL)
4585 {
4586 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
4587 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
4588
4589 c = TREE_REAL_CST (arg1);
4590 if (REAL_VALUE_NEGATIVE (c))
4591 {
4592 /* sqrt(x) < y (likewise == y and <= y) is always false, if y is negative. */
4593 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
4594 return omit_one_operand (type,
4595 convert (type, integer_zero_node),
4596 arg);
4597
4598 /* sqrt(x) > y is always true, if y is negative and we
4599 don't care about NaNs, i.e. negative values of x. */
4600 if (code == NE_EXPR || !HONOR_NANS (mode))
4601 return omit_one_operand (type,
4602 convert (type, integer_one_node),
4603 arg);
4604
4605 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
4606 return fold (build (GE_EXPR, type, arg,
4607 build_real (TREE_TYPE (arg), dconst0)));
4608 }
4609 else if (code == GT_EXPR || code == GE_EXPR)
4610 {
4611 REAL_VALUE_TYPE c2;
4612
4613 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
4614 real_convert (&c2, mode, &c2);
4615
4616 if (REAL_VALUE_ISINF (c2))
4617 {
4618 /* sqrt(x) > y is x == +Inf, when y is very large. */
4619 if (HONOR_INFINITIES (mode))
4620 return fold (build (EQ_EXPR, type, arg,
4621 build_real (TREE_TYPE (arg), c2)));
4622
4623 /* sqrt(x) > y is always false, when y is very large
4624 and we don't care about infinities. */
4625 return omit_one_operand (type,
4626 convert (type, integer_zero_node),
4627 arg);
4628 }
4629
4630 /* sqrt(x) > c is the same as x > c*c. */
4631 return fold (build (code, type, arg,
4632 build_real (TREE_TYPE (arg), c2)));
4633 }
4634 else if (code == LT_EXPR || code == LE_EXPR)
4635 {
4636 REAL_VALUE_TYPE c2;
4637
4638 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
4639 real_convert (&c2, mode, &c2);
4640
4641 if (REAL_VALUE_ISINF (c2))
4642 {
4643 /* sqrt(x) < y is always true, when y is a very large
4644 value and we don't care about NaNs or Infinities. */
4645 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
4646 return omit_one_operand (type,
4647 convert (type, integer_one_node),
4648 arg);
4649
4650 /* sqrt(x) < y is x != +Inf when y is very large and we
4651 don't care about NaNs. */
4652 if (! HONOR_NANS (mode))
4653 return fold (build (NE_EXPR, type, arg,
4654 build_real (TREE_TYPE (arg), c2)));
4655
4656 /* sqrt(x) < y is x >= 0 when y is very large and we
4657 don't care about Infinities. */
4658 if (! HONOR_INFINITIES (mode))
4659 return fold (build (GE_EXPR, type, arg,
4660 build_real (TREE_TYPE (arg), dconst0)));
4661
4662 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
4663 if ((*lang_hooks.decls.global_bindings_p) () != 0
4664 || CONTAINS_PLACEHOLDER_P (arg))
4665 return NULL_TREE;
4666
4667 arg = save_expr (arg);
4668 return fold (build (TRUTH_ANDIF_EXPR, type,
4669 fold (build (GE_EXPR, type, arg,
4670 build_real (TREE_TYPE (arg),
4671 dconst0))),
4672 fold (build (NE_EXPR, type, arg,
4673 build_real (TREE_TYPE (arg),
4674 c2)))));
4675 }
4676
4677 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
4678 if (! HONOR_NANS (mode))
4679 return fold (build (code, type, arg,
4680 build_real (TREE_TYPE (arg), c2)));
4681
4682 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
4683 if ((*lang_hooks.decls.global_bindings_p) () == 0
4684 && ! CONTAINS_PLACEHOLDER_P (arg))
4685 {
4686 arg = save_expr (arg);
4687 return fold (build (TRUTH_ANDIF_EXPR, type,
4688 fold (build (GE_EXPR, type, arg,
4689 build_real (TREE_TYPE (arg),
4690 dconst0))),
4691 fold (build (code, type, arg,
4692 build_real (TREE_TYPE (arg),
4693 c2)))));
4694 }
4695 }
4696 }
4697
4698 return NULL_TREE;
4699 }
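/* Illustrative instances of the sqrt cases above: "sqrt (x) > 2.0"
   folds to "x > 4.0", which is correct even for NaN operands since
   both forms then compare false, and "sqrt (x) < -1.0" folds to 0,
   since sqrt never returns a negative value.  */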
4700
4701 /* Subroutine of fold() that optimizes comparisons against Infinities,
4702 either +Inf or -Inf.
4703
4704 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
4705 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
4706 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
4707
4708 The function returns the constant folded tree if a simplification
4709 can be made, and NULL_TREE otherwise. */
4710
4711 static tree
4712 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
4713 {
4714 enum machine_mode mode;
4715 REAL_VALUE_TYPE max;
4716 tree temp;
4717 bool neg;
4718
4719 mode = TYPE_MODE (TREE_TYPE (arg0));
4720
4721 /* For negative infinity swap the sense of the comparison. */
4722 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
4723 if (neg)
4724 code = swap_tree_comparison (code);
4725
4726 switch (code)
4727 {
4728 case GT_EXPR:
4729 /* x > +Inf is always false, if we ignore sNaNs. */
4730 if (HONOR_SNANS (mode))
4731 return NULL_TREE;
4732 return omit_one_operand (type,
4733 convert (type, integer_zero_node),
4734 arg0);
4735
4736 case LE_EXPR:
4737 /* x <= +Inf is always true, if we don't care about NaNs. */
4738 if (! HONOR_NANS (mode))
4739 return omit_one_operand (type,
4740 convert (type, integer_one_node),
4741 arg0);
4742
4743 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
4744 if ((*lang_hooks.decls.global_bindings_p) () == 0
4745 && ! CONTAINS_PLACEHOLDER_P (arg0))
4746 {
4747 arg0 = save_expr (arg0);
4748 return fold (build (EQ_EXPR, type, arg0, arg0));
4749 }
4750 break;
4751
4752 case EQ_EXPR:
4753 case GE_EXPR:
4754 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
4755 real_maxval (&max, neg, mode);
4756 return fold (build (neg ? LT_EXPR : GT_EXPR, type,
4757 arg0, build_real (TREE_TYPE (arg0), max)));
4758
4759 case LT_EXPR:
4760 /* x < +Inf is always equal to x <= DBL_MAX. */
4761 real_maxval (&max, neg, mode);
4762 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
4763 arg0, build_real (TREE_TYPE (arg0), max)));
4764
4765 case NE_EXPR:
4766 /* x != +Inf is always equal to !(x > DBL_MAX). */
4767 real_maxval (&max, neg, mode);
4768 if (! HONOR_NANS (mode))
4769 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
4770 arg0, build_real (TREE_TYPE (arg0), max)));
4771 temp = fold (build (neg ? LT_EXPR : GT_EXPR, type,
4772 arg0, build_real (TREE_TYPE (arg0), max)));
4773 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
4774
4775 default:
4776 break;
4777 }
4778
4779 return NULL_TREE;
4780 }
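/* For example, assuming a double argument (so the bound is DBL_MAX):
   "x == HUGE_VAL" folds to "x > DBL_MAX" and "x < HUGE_VAL" folds to
   "x <= DBL_MAX"; comparisons against -HUGE_VAL take the swapped
   forms via the `neg' handling above.  */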
4781
4782 /* If CODE with arguments ARG0 and ARG1 represents a single bit
4783 equality/inequality test, then return a simplified form of
4784 the test using shifts and logical operations. Otherwise return
4785 NULL. TYPE is the desired result type. */
4786
4787 tree
4788 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
4789 tree result_type)
4790 {
4791 /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
4792 operand 0. */
4793 if (code == TRUTH_NOT_EXPR)
4794 {
4795 code = TREE_CODE (arg0);
4796 if (code != NE_EXPR && code != EQ_EXPR)
4797 return NULL_TREE;
4798
4799 /* Extract the arguments of the EQ/NE. */
4800 arg1 = TREE_OPERAND (arg0, 1);
4801 arg0 = TREE_OPERAND (arg0, 0);
4802
4803 /* This requires us to invert the code. */
4804 code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
4805 }
4806
4807 /* If this is testing a single bit, we can optimize the test. */
4808 if ((code == NE_EXPR || code == EQ_EXPR)
4809 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
4810 && integer_pow2p (TREE_OPERAND (arg0, 1)))
4811 {
4812 tree inner = TREE_OPERAND (arg0, 0);
4813 tree type = TREE_TYPE (arg0);
4814 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
4815 enum machine_mode operand_mode = TYPE_MODE (type);
4816 int ops_unsigned;
4817 tree signed_type, unsigned_type;
4818 tree arg00;
4819
4820 /* If we have (A & C) != 0 where C is the sign bit of A, convert
4821 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
4822 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
4823 if (arg00 != NULL_TREE)
4824 {
4825 tree stype = (*lang_hooks.types.signed_type) (TREE_TYPE (arg00));
4826 return fold (build (code == EQ_EXPR ? GE_EXPR : LT_EXPR, result_type,
4827 convert (stype, arg00),
4828 convert (stype, integer_zero_node)));
4829 }
4830
4831 /* Otherwise we have (A & C) != 0 where C is a single bit,
4832 convert that into ((A >> C2) & 1), where C2 = log2(C).
4833 Similarly for (A & C) == 0. */
4834
4835 /* If INNER is a right shift of a constant and it plus BITNUM does
4836 not overflow, adjust BITNUM and INNER. */
4837 if (TREE_CODE (inner) == RSHIFT_EXPR
4838 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
4839 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
4840 && bitnum < TYPE_PRECISION (type)
4841 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
4842 TYPE_PRECISION (type) - bitnum))
4843 {
4844 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
4845 inner = TREE_OPERAND (inner, 0);
4846 }
4847
4848 /* If we are going to be able to omit the AND below, we must do our
4849 operations as unsigned. If we must use the AND, we have a choice.
4850 Normally unsigned is faster, but for some machines signed is. */
4851 ops_unsigned = (bitnum == TYPE_PRECISION (type) - 1 ? 1
4852 #ifdef LOAD_EXTEND_OP
4853 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
4854 #else
4855 : 1
4856 #endif
4857 );
4858
4859 signed_type = (*lang_hooks.types.type_for_mode) (operand_mode, 0);
4860 unsigned_type = (*lang_hooks.types.type_for_mode) (operand_mode, 1);
4861
4862 if (bitnum != 0)
4863 inner = build (RSHIFT_EXPR, ops_unsigned ? unsigned_type : signed_type,
4864 inner, size_int (bitnum));
4865
4866 if (code == EQ_EXPR)
4867 inner = build (BIT_XOR_EXPR, ops_unsigned ? unsigned_type : signed_type,
4868 inner, integer_one_node);
4869
4870 /* Put the AND last so it can combine with more things. */
4871 if (bitnum != TYPE_PRECISION (type) - 1)
4872 inner = build (BIT_AND_EXPR, ops_unsigned ? unsigned_type : signed_type,
4873 inner, integer_one_node);
4874
4875 /* Make sure to return the proper type. */
4876 if (TREE_TYPE (inner) != result_type)
4877 inner = convert (result_type, inner);
4878
4879 return inner;
4880 }
4881 return NULL_TREE;
4882 }
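/* Two sketches of the above (the exact bit numbering and the
   signed/unsigned choice are target-dependent): "(x & 4) != 0" can
   become "(x >> 2) & 1", while for a 32-bit int "(x & 0x80000000) != 0"
   becomes "x < 0" through the sign_bit_p shortcut.  */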
4883
4884 /* Perform constant folding and related simplification of EXPR.
4885 The related simplifications include x*1 => x, x*0 => 0, etc.,
4886 and application of the associative law.
4887 NOP_EXPR conversions may be removed freely (as long as we
4888 are careful not to change the C type of the overall expression).
4889 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
4890 but we can constant-fold them if they have constant operands. */
4891
4892 tree
4893 fold (tree expr)
4894 {
4895 tree t = expr;
4896 tree t1 = NULL_TREE;
4897 tree tem;
4898 tree type = TREE_TYPE (expr);
4899 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4900 enum tree_code code = TREE_CODE (t);
4901 int kind = TREE_CODE_CLASS (code);
4902 int invert;
4903 /* WINS will be nonzero when the switch is done
4904 if all operands are constant. */
4905 int wins = 1;
4906
4907 /* Don't try to process an RTL_EXPR since its operands aren't trees.
4908 Likewise for a SAVE_EXPR that's already been evaluated. */
4909 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
4910 return t;
4911
4912 /* Return right away if a constant. */
4913 if (kind == 'c')
4914 return t;
4915
4916 #ifdef MAX_INTEGER_COMPUTATION_MODE
4917 check_max_integer_computation_mode (expr);
4918 #endif
4919
4920 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
4921 {
4922 tree subop;
4923
4924 /* Special case for conversion ops that can have fixed point args. */
4925 arg0 = TREE_OPERAND (t, 0);
4926
4927 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
4928 if (arg0 != 0)
4929 STRIP_SIGN_NOPS (arg0);
4930
4931 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
4932 subop = TREE_REALPART (arg0);
4933 else
4934 subop = arg0;
4935
4936 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
4937 && TREE_CODE (subop) != REAL_CST
4938 )
4939 /* Note that TREE_CONSTANT isn't enough:
4940 static var addresses are constant but we can't
4941 do arithmetic on them. */
4942 wins = 0;
4943 }
4944 else if (IS_EXPR_CODE_CLASS (kind) || kind == 'r')
4945 {
4946 int len = first_rtl_op (code);
4947 int i;
4948 for (i = 0; i < len; i++)
4949 {
4950 tree op = TREE_OPERAND (t, i);
4951 tree subop;
4952
4953 if (op == 0)
4954 continue; /* Valid for CALL_EXPR, at least. */
4955
4956 if (kind == '<' || code == RSHIFT_EXPR)
4957 {
4958 /* Signedness matters here. Perhaps we can refine this
4959 later. */
4960 STRIP_SIGN_NOPS (op);
4961 }
4962 else
4963 /* Strip any conversions that don't change the mode. */
4964 STRIP_NOPS (op);
4965
4966 if (TREE_CODE (op) == COMPLEX_CST)
4967 subop = TREE_REALPART (op);
4968 else
4969 subop = op;
4970
4971 if (TREE_CODE (subop) != INTEGER_CST
4972 && TREE_CODE (subop) != REAL_CST)
4973 /* Note that TREE_CONSTANT isn't enough:
4974 static var addresses are constant but we can't
4975 do arithmetic on them. */
4976 wins = 0;
4977
4978 if (i == 0)
4979 arg0 = op;
4980 else if (i == 1)
4981 arg1 = op;
4982 }
4983 }
4984
4985 /* If this is a commutative operation, and ARG0 is a constant, move it
4986 to ARG1 to reduce the number of tests below. */
4987 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
4988 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
4989 || code == BIT_AND_EXPR)
4990 && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
4991 {
4992 tem = arg0; arg0 = arg1; arg1 = tem;
4993
4994 tem = TREE_OPERAND (t, 0); TREE_OPERAND (t, 0) = TREE_OPERAND (t, 1);
4995 TREE_OPERAND (t, 1) = tem;
4996 }
4997
4998 /* Now WINS is set as described above,
4999 ARG0 is the first operand of EXPR,
5000 and ARG1 is the second operand (if it has more than one operand).
5001
5002 First check for cases where an arithmetic operation is applied to a
5003 compound, conditional, or comparison operation. Push the arithmetic
5004 operation inside the compound or conditional to see if any folding
5005 can then be done. Convert comparison to conditional for this purpose.
5006 This also optimizes non-constant cases that used to be done in
5007 expand_expr.
5008
5009 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
5010 one of the operands is a truth value and the other is a truth value or
5011 a BIT_AND_EXPR with the constant 1. In that case, the
5012 code below would make the expression more complex. Change it to a
5013 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
5014 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
5015
5016 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
5017 || code == EQ_EXPR || code == NE_EXPR)
5018 && ((truth_value_p (TREE_CODE (arg0))
5019 && (truth_value_p (TREE_CODE (arg1))
5020 || (TREE_CODE (arg1) == BIT_AND_EXPR
5021 && integer_onep (TREE_OPERAND (arg1, 1)))))
5022 || (truth_value_p (TREE_CODE (arg1))
5023 && (truth_value_p (TREE_CODE (arg0))
5024 || (TREE_CODE (arg0) == BIT_AND_EXPR
5025 && integer_onep (TREE_OPERAND (arg0, 1)))))))
5026 {
5027 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
5028 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
5029 : TRUTH_XOR_EXPR,
5030 type, arg0, arg1));
5031
5032 if (code == EQ_EXPR)
5033 t = invert_truthvalue (t);
5034
5035 return t;
5036 }
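/* For instance, the block above rewrites "(a < b) & (c < d)" as the
   equivalent TRUTH_AND_EXPR "(a < b) && (c < d)" -- both operands are
   known to be 0 or 1, so the bitwise and logical forms agree -- and
   "(a < b) == (c < d)" as the inversion of "(a < b) ^ (c < d)".  */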
5037
5038 if (TREE_CODE_CLASS (code) == '1')
5039 {
5040 if (TREE_CODE (arg0) == COMPOUND_EXPR)
5041 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5042 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
5043 else if (TREE_CODE (arg0) == COND_EXPR)
5044 {
5045 tree arg01 = TREE_OPERAND (arg0, 1);
5046 tree arg02 = TREE_OPERAND (arg0, 2);
5047 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
5048 arg01 = fold (build1 (code, type, arg01));
5049 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
5050 arg02 = fold (build1 (code, type, arg02));
5051 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
5052 arg01, arg02));
5053
5054 /* If this was a conversion, and all we did was to move it
5055 inside the COND_EXPR, bring it back out. But leave it if
5056 it is a conversion from integer to integer and the
5057 result precision is no wider than a word since such a
5058 conversion is cheap and may be optimized away by combine,
5059 while it couldn't if it were outside the COND_EXPR. Then return
5060 so we don't get into an infinite recursion loop taking the
5061 conversion out and then back in. */
5062
5063 if ((code == NOP_EXPR || code == CONVERT_EXPR
5064 || code == NON_LVALUE_EXPR)
5065 && TREE_CODE (t) == COND_EXPR
5066 && TREE_CODE (TREE_OPERAND (t, 1)) == code
5067 && TREE_CODE (TREE_OPERAND (t, 2)) == code
5068 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1)))
5069 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 2)))
5070 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
5071 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
5072 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
5073 && (INTEGRAL_TYPE_P
5074 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))))
5075 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
5076 t = build1 (code, type,
5077 build (COND_EXPR,
5078 TREE_TYPE (TREE_OPERAND
5079 (TREE_OPERAND (t, 1), 0)),
5080 TREE_OPERAND (t, 0),
5081 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
5082 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
5083 return t;
5084 }
5085 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
5086 return fold (build (COND_EXPR, type, arg0,
5087 fold (build1 (code, type, integer_one_node)),
5088 fold (build1 (code, type, integer_zero_node))));
5089 }
5090 else if (TREE_CODE_CLASS (code) == '<'
5091 && TREE_CODE (arg0) == COMPOUND_EXPR)
5092 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5093 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5094 else if (TREE_CODE_CLASS (code) == '<'
5095 && TREE_CODE (arg1) == COMPOUND_EXPR)
5096 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5097 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
5098 else if (TREE_CODE_CLASS (code) == '2'
5099 || TREE_CODE_CLASS (code) == '<')
5100 {
5101 if (TREE_CODE (arg1) == COMPOUND_EXPR
5102 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg1, 0))
5103 && ! TREE_SIDE_EFFECTS (arg0))
5104 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5105 fold (build (code, type,
5106 arg0, TREE_OPERAND (arg1, 1))));
5107 else if ((TREE_CODE (arg1) == COND_EXPR
5108 || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
5109 && TREE_CODE_CLASS (code) != '<'))
5110 && (TREE_CODE (arg0) != COND_EXPR
5111 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5112 && (! TREE_SIDE_EFFECTS (arg0)
5113 || ((*lang_hooks.decls.global_bindings_p) () == 0
5114 && ! CONTAINS_PLACEHOLDER_P (arg0))))
5115 return
5116 fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
5117 /*cond_first_p=*/0);
5118 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
5119 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5120 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5121 else if ((TREE_CODE (arg0) == COND_EXPR
5122 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
5123 && TREE_CODE_CLASS (code) != '<'))
5124 && (TREE_CODE (arg1) != COND_EXPR
5125 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5126 && (! TREE_SIDE_EFFECTS (arg1)
5127 || ((*lang_hooks.decls.global_bindings_p) () == 0
5128 && ! CONTAINS_PLACEHOLDER_P (arg1))))
5129 return
5130 fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
5131 /*cond_first_p=*/1);
5132 }
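      /* For illustration: (x, a) + b is rewritten above as (x, a + b);
	 a + (x, b) is handled the same way, but only when neither A nor
	 X has side effects, since the rewrite moves X before A.  */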
5133
5134 switch (code)
5135 {
5136 case INTEGER_CST:
5137 case REAL_CST:
5138 case VECTOR_CST:
5139 case STRING_CST:
5140 case COMPLEX_CST:
5141 case CONSTRUCTOR:
5142 return t;
5143
5144 case CONST_DECL:
5145 return fold (DECL_INITIAL (t));
5146
5147 case NOP_EXPR:
5148 case FLOAT_EXPR:
5149 case CONVERT_EXPR:
5150 case FIX_TRUNC_EXPR:
5151 /* Other kinds of FIX are not handled properly by fold_convert. */
5152
5153 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
5154 return TREE_OPERAND (t, 0);
5155
5156 /* Handle cases of two conversions in a row. */
5157 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
5158 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
5159 {
5160 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5161 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
5162 tree final_type = TREE_TYPE (t);
5163 int inside_int = INTEGRAL_TYPE_P (inside_type);
5164 int inside_ptr = POINTER_TYPE_P (inside_type);
5165 int inside_float = FLOAT_TYPE_P (inside_type);
5166 unsigned int inside_prec = TYPE_PRECISION (inside_type);
5167 int inside_unsignedp = TREE_UNSIGNED (inside_type);
5168 int inter_int = INTEGRAL_TYPE_P (inter_type);
5169 int inter_ptr = POINTER_TYPE_P (inter_type);
5170 int inter_float = FLOAT_TYPE_P (inter_type);
5171 unsigned int inter_prec = TYPE_PRECISION (inter_type);
5172 int inter_unsignedp = TREE_UNSIGNED (inter_type);
5173 int final_int = INTEGRAL_TYPE_P (final_type);
5174 int final_ptr = POINTER_TYPE_P (final_type);
5175 int final_float = FLOAT_TYPE_P (final_type);
5176 unsigned int final_prec = TYPE_PRECISION (final_type);
5177 int final_unsignedp = TREE_UNSIGNED (final_type);
5178
5179 /* In addition to the cases of two conversions in a row
5180 handled below, if we are converting something to its own
5181 type via an object of identical or wider precision, neither
5182 conversion is needed. */
5183 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (final_type)
5184 && ((inter_int && final_int) || (inter_float && final_float))
5185 && inter_prec >= final_prec)
5186 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5187
5188 /* Likewise, if the intermediate and final types are either both
5189 float or both integer, we don't need the middle conversion if
5190 it is wider than the final type and doesn't change the signedness
5191 (for integers). Avoid this if the final type is a pointer
5192 since then we sometimes need the inner conversion. Likewise if
5193 the outer has a precision not equal to the size of its mode. */
5194 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
5195 || (inter_float && inside_float))
5196 && inter_prec >= inside_prec
5197 && (inter_float || inter_unsignedp == inside_unsignedp)
5198 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5199 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5200 && ! final_ptr)
5201 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5202
5203 /* If we have a sign-extension of a zero-extended value, we can
5204 replace that by a single zero-extension. */
5205 if (inside_int && inter_int && final_int
5206 && inside_prec < inter_prec && inter_prec < final_prec
5207 && inside_unsignedp && !inter_unsignedp)
5208 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5209
5210 /* Two conversions in a row are not needed unless:
5211 - some conversion is floating-point (overstrict for now), or
5212 - the intermediate type is narrower than both initial and
5213 final, or
5214 - the intermediate type and innermost type differ in signedness,
5215 and the outermost type is wider than the intermediate, or
5216 - the initial type is a pointer type and the precisions of the
5217 intermediate and final types differ, or
5218 - the final type is a pointer type and the precisions of the
5219 initial and intermediate types differ. */
5220 if (! inside_float && ! inter_float && ! final_float
5221 && (inter_prec > inside_prec || inter_prec > final_prec)
5222 && ! (inside_int && inter_int
5223 && inter_unsignedp != inside_unsignedp
5224 && inter_prec < final_prec)
5225 && ((inter_unsignedp && inter_prec > inside_prec)
5226 == (final_unsignedp && final_prec > inter_prec))
5227 && ! (inside_ptr && inter_prec != final_prec)
5228 && ! (final_ptr && inside_prec != inter_prec)
5229 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5230 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5231 && ! final_ptr)
5232 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5233 }
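	  /* For illustration: converting an int to a (no narrower) long
	     and back to int satisfies the first rule above and drops both
	     conversions, while (long long) (int) c for an unsigned char C
	     reduces to a single zero-extension of C.  */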
5234
5235 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
5236 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
5237 /* Detect assigning a bitfield. */
5238 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
5239 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
5240 {
5241 /* Don't leave an assignment inside a conversion
5242 unless assigning a bitfield. */
5243 tree prev = TREE_OPERAND (t, 0);
5244 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
5245 /* First do the assignment, then return converted constant. */
5246 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
5247 TREE_USED (t) = 1;
5248 return t;
5249 }
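      /* For illustration: (long) (x = 5) is rewritten above as the
	 sequence (x = 5, (long) 5), applying the conversion to the
	 constant rather than leaving it around the assignment.  */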
5250
5251       /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
5252          constant (if x has signed type, the sign bit cannot be set
5253          in c).  This folds the extension into the BIT_AND_EXPR.  */
5254 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
5255 && TREE_CODE (TREE_TYPE (t)) != BOOLEAN_TYPE
5256 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
5257 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
5258 {
5259 tree and = TREE_OPERAND (t, 0);
5260 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
5261 int change = 0;
5262
5263 if (TREE_UNSIGNED (TREE_TYPE (and))
5264 || (TYPE_PRECISION (TREE_TYPE (t))
5265 <= TYPE_PRECISION (TREE_TYPE (and))))
5266 change = 1;
5267 else if (TYPE_PRECISION (TREE_TYPE (and1))
5268 <= HOST_BITS_PER_WIDE_INT
5269 && host_integerp (and1, 1))
5270 {
5271 unsigned HOST_WIDE_INT cst;
5272
5273 cst = tree_low_cst (and1, 1);
5274 cst &= (HOST_WIDE_INT) -1
5275 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
5276 change = (cst == 0);
5277 #ifdef LOAD_EXTEND_OP
5278 if (change
5279 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
5280 == ZERO_EXTEND))
5281 {
5282 tree uns = (*lang_hooks.types.unsigned_type) (TREE_TYPE (and0));
5283 and0 = convert (uns, and0);
5284 and1 = convert (uns, and1);
5285 }
5286 #endif
5287 }
5288 if (change)
5289 return fold (build (BIT_AND_EXPR, TREE_TYPE (t),
5290 convert (TREE_TYPE (t), and0),
5291 convert (TREE_TYPE (t), and1)));
5292 }
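      /* For illustration: if the BIT_AND_EXPR is computed in an unsigned
	 type, the rewrite above is always performed; for an 8-bit signed
	 type, (int) (c & 0x7f) still becomes (int) c & 0x7f, since 0x7f
	 leaves the sign bit clear.  */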
5293
5294 if (!wins)
5295 {
5296 TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
5297 return t;
5298 }
5299 return fold_convert (t, arg0);
5300
5301 case VIEW_CONVERT_EXPR:
5302 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
5303 return build1 (VIEW_CONVERT_EXPR, type,
5304 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5305 return t;
5306
5307 case COMPONENT_REF:
5308 if (TREE_CODE (arg0) == CONSTRUCTOR
5309 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
5310 {
5311 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
5312 if (m)
5313 t = TREE_VALUE (m);
5314 }
5315 return t;
5316
5317 case RANGE_EXPR:
5318 TREE_CONSTANT (t) = wins;
5319 return t;
5320
5321 case NEGATE_EXPR:
5322 if (wins)
5323 {
5324 if (TREE_CODE (arg0) == INTEGER_CST)
5325 {
5326 unsigned HOST_WIDE_INT low;
5327 HOST_WIDE_INT high;
5328 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5329 TREE_INT_CST_HIGH (arg0),
5330 &low, &high);
5331 t = build_int_2 (low, high);
5332 TREE_TYPE (t) = type;
5333 TREE_OVERFLOW (t)
5334 = (TREE_OVERFLOW (arg0)
5335 | force_fit_type (t, overflow && !TREE_UNSIGNED (type)));
5336 TREE_CONSTANT_OVERFLOW (t)
5337 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5338 }
5339 else if (TREE_CODE (arg0) == REAL_CST)
5340 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5341 }
5342 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5343 return TREE_OPERAND (arg0, 0);
5344 /* Convert -((double)float) into (double)(-float). */
5345 else if (TREE_CODE (arg0) == NOP_EXPR
5346 && TREE_CODE (type) == REAL_TYPE)
5347 {
5348 tree targ0 = strip_float_extensions (arg0);
5349 if (targ0 != arg0)
5350 return convert (type, build1 (NEGATE_EXPR, TREE_TYPE (targ0), targ0));
5351
5352 }
5353
5354       /* Convert - (a - b) to (b - a) for non-floating-point, and for
              floating-point when -funsafe-math-optimizations permits.  */
5355 else if (TREE_CODE (arg0) == MINUS_EXPR
5356 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
5357 return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
5358 TREE_OPERAND (arg0, 0));
5359
5360       /* Convert -f(x) into f(-x) where f is sin, tan or atan.  These
              are odd functions, so negating the argument instead of the
              result preserves the value.  */
5361 switch (builtin_mathfn_code (arg0))
5362 {
5363 case BUILT_IN_SIN:
5364 case BUILT_IN_SINF:
5365 case BUILT_IN_SINL:
5366 case BUILT_IN_TAN:
5367 case BUILT_IN_TANF:
5368 case BUILT_IN_TANL:
5369 case BUILT_IN_ATAN:
5370 case BUILT_IN_ATANF:
5371 case BUILT_IN_ATANL:
5372 if (negate_expr_p (TREE_VALUE (TREE_OPERAND (arg0, 1))))
5373 {
5374 tree fndecl, arg, arglist;
5375
5376 fndecl = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
5377 arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5378 arg = fold (build1 (NEGATE_EXPR, type, arg));
5379 arglist = build_tree_list (NULL_TREE, arg);
5380 return build_function_call_expr (fndecl, arglist);
5381 }
5382 break;
5383
5384 default:
5385 break;
5386 }
5387 return t;
5388
5389 case ABS_EXPR:
5390 if (wins)
5391 {
5392 if (TREE_CODE (arg0) == INTEGER_CST)
5393 {
5394 /* If the value is unsigned, then the absolute value is
5395 the same as the ordinary value. */
5396 if (TREE_UNSIGNED (type))
5397 return arg0;
5398 /* Similarly, if the value is non-negative. */
5399 else if (INT_CST_LT (integer_minus_one_node, arg0))
5400 return arg0;
5401 /* If the value is negative, then the absolute value is
5402 its negation. */
5403 else
5404 {
5405 unsigned HOST_WIDE_INT low;
5406 HOST_WIDE_INT high;
5407 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5408 TREE_INT_CST_HIGH (arg0),
5409 &low, &high);
5410 t = build_int_2 (low, high);
5411 TREE_TYPE (t) = type;
5412 TREE_OVERFLOW (t)
5413 = (TREE_OVERFLOW (arg0)
5414 | force_fit_type (t, overflow));
5415 TREE_CONSTANT_OVERFLOW (t)
5416 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5417 }
5418 }
5419 else if (TREE_CODE (arg0) == REAL_CST)
5420 {
5421 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
5422 t = build_real (type,
5423 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5424 }
5425 }
5426 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5427 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
5428 /* Convert fabs((double)float) into (double)fabsf(float). */
5429 else if (TREE_CODE (arg0) == NOP_EXPR
5430 && TREE_CODE (type) == REAL_TYPE)
5431 {
5432 tree targ0 = strip_float_extensions (arg0);
5433 if (targ0 != arg0)
5434 return convert (type, fold (build1 (ABS_EXPR, TREE_TYPE (targ0),
5435 targ0)));
5436 }
5437 else if (tree_expr_nonnegative_p (arg0))
5438 return arg0;
5439 return t;
5440
5441 case CONJ_EXPR:
5442 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5443 return convert (type, arg0);
5444 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5445 return build (COMPLEX_EXPR, type,
5446 TREE_OPERAND (arg0, 0),
5447 negate_expr (TREE_OPERAND (arg0, 1)));
5448 else if (TREE_CODE (arg0) == COMPLEX_CST)
5449 return build_complex (type, TREE_REALPART (arg0),
5450 negate_expr (TREE_IMAGPART (arg0)));
5451 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5452 return fold (build (TREE_CODE (arg0), type,
5453 fold (build1 (CONJ_EXPR, type,
5454 TREE_OPERAND (arg0, 0))),
5455 fold (build1 (CONJ_EXPR,
5456 type, TREE_OPERAND (arg0, 1)))));
5457 else if (TREE_CODE (arg0) == CONJ_EXPR)
5458 return TREE_OPERAND (arg0, 0);
5459 return t;
5460
5461 case BIT_NOT_EXPR:
5462 if (wins)
5463 {
5464 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
5465 ~ TREE_INT_CST_HIGH (arg0));
5466 TREE_TYPE (t) = type;
5467 force_fit_type (t, 0);
5468 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
5469 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
5470 }
5471 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
5472 return TREE_OPERAND (arg0, 0);
5473 return t;
5474
5475 case PLUS_EXPR:
5476 /* A + (-B) -> A - B */
5477 if (TREE_CODE (arg1) == NEGATE_EXPR)
5478 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5479 /* (-A) + B -> B - A */
5480 if (TREE_CODE (arg0) == NEGATE_EXPR)
5481 return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
5482 else if (! FLOAT_TYPE_P (type))
5483 {
5484 if (integer_zerop (arg1))
5485 return non_lvalue (convert (type, arg0));
5486
5487 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
5488 with a constant, and the two constants have no bits in common,
5489 we should treat this as a BIT_IOR_EXPR since this may produce more
5490 simplifications. */
5491 if (TREE_CODE (arg0) == BIT_AND_EXPR
5492 && TREE_CODE (arg1) == BIT_AND_EXPR
5493 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5494 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5495 && integer_zerop (const_binop (BIT_AND_EXPR,
5496 TREE_OPERAND (arg0, 1),
5497 TREE_OPERAND (arg1, 1), 0)))
5498 {
5499 code = BIT_IOR_EXPR;
5500 goto bit_ior;
5501 }
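	  /* For illustration: (x & 1) + (y & 2) takes this path and is
	     folded as (x & 1) | (y & 2), since the masks share no bits
	     and so the addition can never carry.  */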
5502
5503 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
5504 (plus (plus (mult) (mult)) (foo)) so that we can
5505 take advantage of the factoring cases below. */
5506 if ((TREE_CODE (arg0) == PLUS_EXPR
5507 && TREE_CODE (arg1) == MULT_EXPR)
5508 || (TREE_CODE (arg1) == PLUS_EXPR
5509 && TREE_CODE (arg0) == MULT_EXPR))
5510 {
5511 tree parg0, parg1, parg, marg;
5512
5513 if (TREE_CODE (arg0) == PLUS_EXPR)
5514 parg = arg0, marg = arg1;
5515 else
5516 parg = arg1, marg = arg0;
5517 parg0 = TREE_OPERAND (parg, 0);
5518 parg1 = TREE_OPERAND (parg, 1);
5519 STRIP_NOPS (parg0);
5520 STRIP_NOPS (parg1);
5521
5522 if (TREE_CODE (parg0) == MULT_EXPR
5523 && TREE_CODE (parg1) != MULT_EXPR)
5524 return fold (build (PLUS_EXPR, type,
5525 fold (build (PLUS_EXPR, type,
5526 convert (type, parg0),
5527 convert (type, marg))),
5528 convert (type, parg1)));
5529 if (TREE_CODE (parg0) != MULT_EXPR
5530 && TREE_CODE (parg1) == MULT_EXPR)
5531 return fold (build (PLUS_EXPR, type,
5532 fold (build (PLUS_EXPR, type,
5533 convert (type, parg1),
5534 convert (type, marg))),
5535 convert (type, parg0)));
5536 }
5537
5538 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
5539 {
5540 tree arg00, arg01, arg10, arg11;
5541 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
5542
5543 /* (A * C) + (B * C) -> (A+B) * C.
5544 We are most concerned about the case where C is a constant,
5545 but other combinations show up during loop reduction. Since
5546 it is not difficult, try all four possibilities. */
5547
5548 arg00 = TREE_OPERAND (arg0, 0);
5549 arg01 = TREE_OPERAND (arg0, 1);
5550 arg10 = TREE_OPERAND (arg1, 0);
5551 arg11 = TREE_OPERAND (arg1, 1);
5552 same = NULL_TREE;
5553
5554 if (operand_equal_p (arg01, arg11, 0))
5555 same = arg01, alt0 = arg00, alt1 = arg10;
5556 else if (operand_equal_p (arg00, arg10, 0))
5557 same = arg00, alt0 = arg01, alt1 = arg11;
5558 else if (operand_equal_p (arg00, arg11, 0))
5559 same = arg00, alt0 = arg01, alt1 = arg10;
5560 else if (operand_equal_p (arg01, arg10, 0))
5561 same = arg01, alt0 = arg00, alt1 = arg11;
5562
5563 /* No identical multiplicands; see if we can find a common
5564 power-of-two factor in non-power-of-two multiplies. This
5565 can help in multi-dimensional array access. */
5566 else if (TREE_CODE (arg01) == INTEGER_CST
5567 && TREE_CODE (arg11) == INTEGER_CST
5568 && TREE_INT_CST_HIGH (arg01) == 0
5569 && TREE_INT_CST_HIGH (arg11) == 0)
5570 {
5571 HOST_WIDE_INT int01, int11, tmp;
5572 int01 = TREE_INT_CST_LOW (arg01);
5573 int11 = TREE_INT_CST_LOW (arg11);
5574
5575 /* Move min of absolute values to int11. */
5576 if ((int01 >= 0 ? int01 : -int01)
5577 < (int11 >= 0 ? int11 : -int11))
5578 {
5579 tmp = int01, int01 = int11, int11 = tmp;
5580 alt0 = arg00, arg00 = arg10, arg10 = alt0;
5581 alt0 = arg01, arg01 = arg11, arg11 = alt0;
5582 }
5583
5584 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
5585 {
5586 alt0 = fold (build (MULT_EXPR, type, arg00,
5587 build_int_2 (int01 / int11, 0)));
5588 alt1 = arg10;
5589 same = arg11;
5590 }
5591 }
5592
5593 if (same)
5594 return fold (build (MULT_EXPR, type,
5595 fold (build (PLUS_EXPR, type, alt0, alt1)),
5596 same));
5597 }
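	  /* For illustration: a*4 + b*4 becomes (a + b) * 4 here, and the
	     power-of-two case turns i*12 + j*4 into (i*3 + j) * 4.  */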
5598 }
5599
5600 /* See if ARG1 is zero and X + ARG1 reduces to X. */
5601 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
5602 return non_lvalue (convert (type, arg0));
5603
5604 /* Likewise if the operands are reversed. */
5605 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
5606 return non_lvalue (convert (type, arg1));
5607
5608 bit_rotate:
5609       /* (A << C1) + (A >> C2) is a rotate of A by C1 bits if A is
5610          unsigned and C1+C2 is the size of A.  */
5611       /* (A << B) + (A >> (Z - B)) is a rotate of A by B bits if A is
5612          unsigned and Z is the size of A.  */
5613 {
5614 enum tree_code code0, code1;
5615 code0 = TREE_CODE (arg0);
5616 code1 = TREE_CODE (arg1);
5617 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
5618 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
5619 && operand_equal_p (TREE_OPERAND (arg0, 0),
5620 TREE_OPERAND (arg1, 0), 0)
5621 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5622 {
5623 tree tree01, tree11;
5624 enum tree_code code01, code11;
5625
5626 tree01 = TREE_OPERAND (arg0, 1);
5627 tree11 = TREE_OPERAND (arg1, 1);
5628 STRIP_NOPS (tree01);
5629 STRIP_NOPS (tree11);
5630 code01 = TREE_CODE (tree01);
5631 code11 = TREE_CODE (tree11);
5632 if (code01 == INTEGER_CST
5633 && code11 == INTEGER_CST
5634 && TREE_INT_CST_HIGH (tree01) == 0
5635 && TREE_INT_CST_HIGH (tree11) == 0
5636 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
5637 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
5638 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
5639 code0 == LSHIFT_EXPR ? tree01 : tree11);
5640 else if (code11 == MINUS_EXPR)
5641 {
5642 tree tree110, tree111;
5643 tree110 = TREE_OPERAND (tree11, 0);
5644 tree111 = TREE_OPERAND (tree11, 1);
5645 STRIP_NOPS (tree110);
5646 STRIP_NOPS (tree111);
5647 if (TREE_CODE (tree110) == INTEGER_CST
5648 && 0 == compare_tree_int (tree110,
5649 TYPE_PRECISION
5650 (TREE_TYPE (TREE_OPERAND
5651 (arg0, 0))))
5652 && operand_equal_p (tree01, tree111, 0))
5653 return build ((code0 == LSHIFT_EXPR
5654 ? LROTATE_EXPR
5655 : RROTATE_EXPR),
5656 type, TREE_OPERAND (arg0, 0), tree01);
5657 }
5658 else if (code01 == MINUS_EXPR)
5659 {
5660 tree tree010, tree011;
5661 tree010 = TREE_OPERAND (tree01, 0);
5662 tree011 = TREE_OPERAND (tree01, 1);
5663 STRIP_NOPS (tree010);
5664 STRIP_NOPS (tree011);
5665 if (TREE_CODE (tree010) == INTEGER_CST
5666 && 0 == compare_tree_int (tree010,
5667 TYPE_PRECISION
5668 (TREE_TYPE (TREE_OPERAND
5669 (arg0, 0))))
5670 && operand_equal_p (tree11, tree011, 0))
5671 return build ((code0 != LSHIFT_EXPR
5672 ? LROTATE_EXPR
5673 : RROTATE_EXPR),
5674 type, TREE_OPERAND (arg0, 0), tree11);
5675 }
5676 }
5677 }
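      /* For illustration (assuming a 32-bit unsigned X):
	 (x << 3) + (x >> 29) is recognized above as a left rotate of X
	 by 3, and (x << b) + (x >> (32 - b)) as a rotate by B.  The same
	 code also serves BIT_IOR_EXPR and BIT_XOR_EXPR via bit_rotate.  */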
5678
5679 associate:
5680       /* In most languages, we can't associate operations on floats through
5681 parentheses. Rather than remember where the parentheses were, we
5682 don't associate floats at all. It shouldn't matter much. However,
5683 associating multiplications is only very slightly inaccurate, so do
5684 that if -funsafe-math-optimizations is specified. */
5685
5686 if (! wins
5687 && (! FLOAT_TYPE_P (type)
5688 || (flag_unsafe_math_optimizations && code == MULT_EXPR)))
5689 {
5690 tree var0, con0, lit0, minus_lit0;
5691 tree var1, con1, lit1, minus_lit1;
5692
5693 /* Split both trees into variables, constants, and literals. Then
5694 associate each group together, the constants with literals,
5695 then the result with variables. This increases the chances of
5696 literals being recombined later and of generating relocatable
5697 expressions for the sum of a constant and literal. */
5698 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
5699 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
5700 code == MINUS_EXPR);
5701
5702 /* Only do something if we found more than two objects. Otherwise,
5703 nothing has changed and we risk infinite recursion. */
5704 if (2 < ((var0 != 0) + (var1 != 0)
5705 + (con0 != 0) + (con1 != 0)
5706 + (lit0 != 0) + (lit1 != 0)
5707 + (minus_lit0 != 0) + (minus_lit1 != 0)))
5708 {
5709 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
5710 if (code == MINUS_EXPR)
5711 code = PLUS_EXPR;
5712
5713 var0 = associate_trees (var0, var1, code, type);
5714 con0 = associate_trees (con0, con1, code, type);
5715 lit0 = associate_trees (lit0, lit1, code, type);
5716 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
5717
5718 /* Preserve the MINUS_EXPR if the negative part of the literal is
5719 greater than the positive part. Otherwise, the multiplicative
5720              folding code (i.e. extract_muldiv) may be fooled when
5721              unsigned constants are subtracted, as in the following
5722 example: ((X*2 + 4) - 8U)/2. */
5723 if (minus_lit0 && lit0)
5724 {
5725 if (tree_int_cst_lt (lit0, minus_lit0))
5726 {
5727 minus_lit0 = associate_trees (minus_lit0, lit0,
5728 MINUS_EXPR, type);
5729 lit0 = 0;
5730 }
5731 else
5732 {
5733 lit0 = associate_trees (lit0, minus_lit0,
5734 MINUS_EXPR, type);
5735 minus_lit0 = 0;
5736 }
5737 }
5738 if (minus_lit0)
5739 {
5740 if (con0 == 0)
5741 return convert (type, associate_trees (var0, minus_lit0,
5742 MINUS_EXPR, type));
5743 else
5744 {
5745 con0 = associate_trees (con0, minus_lit0,
5746 MINUS_EXPR, type);
5747 return convert (type, associate_trees (var0, con0,
5748 PLUS_EXPR, type));
5749 }
5750 }
5751
5752 con0 = associate_trees (con0, lit0, code, type);
5753 return convert (type, associate_trees (var0, con0, code, type));
5754 }
5755 }
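      /* For illustration: for integer types, (x + 1) + (y + 2) is split
	 and reassociated above into (x + y) + 3, letting the literals
	 combine.  */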
5756
5757 binary:
5758 if (wins)
5759 t1 = const_binop (code, arg0, arg1, 0);
5760 if (t1 != NULL_TREE)
5761 {
5762 /* The return value should always have
5763 the same type as the original expression. */
5764 if (TREE_TYPE (t1) != TREE_TYPE (t))
5765 t1 = convert (TREE_TYPE (t), t1);
5766
5767 return t1;
5768 }
5769 return t;
5770
5771 case MINUS_EXPR:
5772 /* A - (-B) -> A + B */
5773 if (TREE_CODE (arg1) == NEGATE_EXPR)
5774 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5775 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
5776 if (TREE_CODE (arg0) == NEGATE_EXPR
5777 && (FLOAT_TYPE_P (type)
5778 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
5779 && negate_expr_p (arg1)
5780 && (! TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
5781 && (! TREE_SIDE_EFFECTS (arg1) || TREE_CONSTANT (arg0)))
5782 return fold (build (MINUS_EXPR, type, negate_expr (arg1),
5783 TREE_OPERAND (arg0, 0)));
5784
5785 if (! FLOAT_TYPE_P (type))
5786 {
5787 if (! wins && integer_zerop (arg0))
5788 return negate_expr (convert (type, arg1));
5789 if (integer_zerop (arg1))
5790 return non_lvalue (convert (type, arg0));
5791
5792 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
5793 about the case where C is a constant, just try one of the
5794 four possibilities. */
5795
5796 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
5797 && operand_equal_p (TREE_OPERAND (arg0, 1),
5798 TREE_OPERAND (arg1, 1), 0))
5799 return fold (build (MULT_EXPR, type,
5800 fold (build (MINUS_EXPR, type,
5801 TREE_OPERAND (arg0, 0),
5802 TREE_OPERAND (arg1, 0))),
5803 TREE_OPERAND (arg0, 1)));
5804
5805 /* Fold A - (A & B) into ~B & A. */
5806 if (!TREE_SIDE_EFFECTS (arg0)
5807 && TREE_CODE (arg1) == BIT_AND_EXPR)
5808 {
5809 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
5810 return fold (build (BIT_AND_EXPR, type,
5811 fold (build1 (BIT_NOT_EXPR, type,
5812 TREE_OPERAND (arg1, 0))),
5813 arg0));
5814 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
5815 return fold (build (BIT_AND_EXPR, type,
5816 fold (build1 (BIT_NOT_EXPR, type,
5817 TREE_OPERAND (arg1, 1))),
5818 arg0));
5819 }
5820 }
5821
5822 /* See if ARG1 is zero and X - ARG1 reduces to X. */
5823 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
5824 return non_lvalue (convert (type, arg0));
5825
5826 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
5827 ARG0 is zero and X + ARG0 reduces to X, since that would mean
5828 (-ARG1 + ARG0) reduces to -ARG1. */
5829 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
5830 return negate_expr (convert (type, arg1));
5831
5832 /* Fold &x - &x. This can happen from &x.foo - &x.
5833 This is unsafe for certain floats even in non-IEEE formats.
5834          In IEEE, it is unsafe because it gives the wrong result for NaNs.
5835 Also note that operand_equal_p is always false if an operand
5836 is volatile. */
5837
5838 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
5839 && operand_equal_p (arg0, arg1, 0))
5840 return convert (type, integer_zero_node);
5841
5842 goto associate;
5843
5844 case MULT_EXPR:
5845 /* (-A) * (-B) -> A * B */
5846 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
5847 return fold (build (MULT_EXPR, type, TREE_OPERAND (arg0, 0),
5848 TREE_OPERAND (arg1, 0)));
5849
5850 if (! FLOAT_TYPE_P (type))
5851 {
5852 if (integer_zerop (arg1))
5853 return omit_one_operand (type, arg1, arg0);
5854 if (integer_onep (arg1))
5855 return non_lvalue (convert (type, arg0));
5856
5857 /* (a * (1 << b)) is (a << b) */
5858 if (TREE_CODE (arg1) == LSHIFT_EXPR
5859 && integer_onep (TREE_OPERAND (arg1, 0)))
5860 return fold (build (LSHIFT_EXPR, type, arg0,
5861 TREE_OPERAND (arg1, 1)));
5862 if (TREE_CODE (arg0) == LSHIFT_EXPR
5863 && integer_onep (TREE_OPERAND (arg0, 0)))
5864 return fold (build (LSHIFT_EXPR, type, arg1,
5865 TREE_OPERAND (arg0, 1)));
5866
5867 if (TREE_CODE (arg1) == INTEGER_CST
5868 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
5869 convert (type, arg1),
5870 code, NULL_TREE)))
5871 return convert (type, tem);
5872
5873 }
5874 else
5875 {
5876 /* Maybe fold x * 0 to 0. The expressions aren't the same
5877 when x is NaN, since x * 0 is also NaN. Nor are they the
5878 same in modes with signed zeros, since multiplying a
5879 negative value by 0 gives -0, not +0. */
5880 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
5881 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
5882 && real_zerop (arg1))
5883 return omit_one_operand (type, arg1, arg0);
5884 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
5885 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
5886 && real_onep (arg1))
5887 return non_lvalue (convert (type, arg0));
5888
5889 /* Transform x * -1.0 into -x. */
5890 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
5891 && real_minus_onep (arg1))
5892 return fold (build1 (NEGATE_EXPR, type, arg0));
5893
5894 /* x*2 is x+x */
5895 if (! wins && real_twop (arg1)
5896 && (*lang_hooks.decls.global_bindings_p) () == 0
5897 && ! CONTAINS_PLACEHOLDER_P (arg0))
5898 {
5899 tree arg = save_expr (arg0);
5900 return fold (build (PLUS_EXPR, type, arg, arg));
5901 }
5902
5903 if (flag_unsafe_math_optimizations)
5904 {
5905 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
5906 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
5907
5908 /* Optimizations of sqrt(...)*sqrt(...). */
5909 if ((fcode0 == BUILT_IN_SQRT && fcode1 == BUILT_IN_SQRT)
5910 || (fcode0 == BUILT_IN_SQRTF && fcode1 == BUILT_IN_SQRTF)
5911 || (fcode0 == BUILT_IN_SQRTL && fcode1 == BUILT_IN_SQRTL))
5912 {
5913 tree sqrtfn, arg, arglist;
5914 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
5915 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
5916
5917 /* Optimize sqrt(x)*sqrt(x) as x. */
5918 if (operand_equal_p (arg00, arg10, 0)
5919 && ! HONOR_SNANS (TYPE_MODE (type)))
5920 return arg00;
5921
5922 /* Optimize sqrt(x)*sqrt(y) as sqrt(x*y). */
5923 sqrtfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
5924 arg = fold (build (MULT_EXPR, type, arg00, arg10));
5925 arglist = build_tree_list (NULL_TREE, arg);
5926 return build_function_call_expr (sqrtfn, arglist);
5927 }
5928
5929 /* Optimize exp(x)*exp(y) as exp(x+y). */
5930 if ((fcode0 == BUILT_IN_EXP && fcode1 == BUILT_IN_EXP)
5931 || (fcode0 == BUILT_IN_EXPF && fcode1 == BUILT_IN_EXPF)
5932 || (fcode0 == BUILT_IN_EXPL && fcode1 == BUILT_IN_EXPL))
5933 {
5934 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
5935 tree arg = build (PLUS_EXPR, type,
5936 TREE_VALUE (TREE_OPERAND (arg0, 1)),
5937 TREE_VALUE (TREE_OPERAND (arg1, 1)));
5938 tree arglist = build_tree_list (NULL_TREE, fold (arg));
5939 return build_function_call_expr (expfn, arglist);
5940 }
5941
5942 /* Optimizations of pow(...)*pow(...). */
5943 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
5944 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
5945 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
5946 {
5947 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
5948 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
5949 1)));
5950 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
5951 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
5952 1)));
5953
5954 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
5955 if (operand_equal_p (arg01, arg11, 0))
5956 {
5957 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
5958 tree arg = build (MULT_EXPR, type, arg00, arg10);
5959 tree arglist = tree_cons (NULL_TREE, fold (arg),
5960 build_tree_list (NULL_TREE,
5961 arg01));
5962 return build_function_call_expr (powfn, arglist);
5963 }
5964
5965 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
5966 if (operand_equal_p (arg00, arg10, 0))
5967 {
5968 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
5969 tree arg = fold (build (PLUS_EXPR, type, arg01, arg11));
5970 tree arglist = tree_cons (NULL_TREE, arg00,
5971 build_tree_list (NULL_TREE,
5972 arg));
5973 return build_function_call_expr (powfn, arglist);
5974 }
5975 }
5976
5977 /* Optimize tan(x)*cos(x) as sin(x). */
5978 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
5979 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
5980 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
5981 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
5982 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
5983 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
5984 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
5985 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
5986 {
5987 tree sinfn;
5988
5989 switch (fcode0)
5990 {
5991 case BUILT_IN_TAN:
5992 case BUILT_IN_COS:
5993 sinfn = implicit_built_in_decls[BUILT_IN_SIN];
5994 break;
5995 case BUILT_IN_TANF:
5996 case BUILT_IN_COSF:
5997 sinfn = implicit_built_in_decls[BUILT_IN_SINF];
5998 break;
5999 case BUILT_IN_TANL:
6000 case BUILT_IN_COSL:
6001 sinfn = implicit_built_in_decls[BUILT_IN_SINL];
6002 break;
6003 default:
6004 sinfn = NULL_TREE;
6005 }
6006
6007 if (sinfn != NULL_TREE)
6008 return build_function_call_expr (sinfn,
6009 TREE_OPERAND (arg0, 1));
6010 }
6011 }
6012 }
6013 goto associate;
6014
6015 case BIT_IOR_EXPR:
6016 bit_ior:
6017 if (integer_all_onesp (arg1))
6018 return omit_one_operand (type, arg1, arg0);
6019 if (integer_zerop (arg1))
6020 return non_lvalue (convert (type, arg0));
6021 t1 = distribute_bit_expr (code, type, arg0, arg1);
6022 if (t1 != NULL_TREE)
6023 return t1;
6024
6025 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
6026
6027 This results in more efficient code for machines without a NAND
6028 instruction. Combine will canonicalize to the first form
6029 which will allow use of NAND instructions provided by the
6030 backend if they exist. */
6031 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6032 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6033 {
6034 return fold (build1 (BIT_NOT_EXPR, type,
6035 build (BIT_AND_EXPR, type,
6036 TREE_OPERAND (arg0, 0),
6037 TREE_OPERAND (arg1, 0))));
6038 }
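      /* For illustration: ~a | ~b becomes ~(a & b), one De Morgan
	 rewrite; the BIT_AND_EXPR case below performs the dual
	 ~a & ~b -> ~(a | b).  */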
6039
6040 /* See if this can be simplified into a rotate first. If that
6041 is unsuccessful continue in the association code. */
6042 goto bit_rotate;
6043
6044 case BIT_XOR_EXPR:
6045 if (integer_zerop (arg1))
6046 return non_lvalue (convert (type, arg0));
6047 if (integer_all_onesp (arg1))
6048 return fold (build1 (BIT_NOT_EXPR, type, arg0));
6049
6050 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
6051 with a constant, and the two constants have no bits in common,
6052 we should treat this as a BIT_IOR_EXPR since this may produce more
6053 simplifications. */
6054 if (TREE_CODE (arg0) == BIT_AND_EXPR
6055 && TREE_CODE (arg1) == BIT_AND_EXPR
6056 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6057 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6058 && integer_zerop (const_binop (BIT_AND_EXPR,
6059 TREE_OPERAND (arg0, 1),
6060 TREE_OPERAND (arg1, 1), 0)))
6061 {
6062 code = BIT_IOR_EXPR;
6063 goto bit_ior;
6064 }
6065
6066 /* See if this can be simplified into a rotate first. If that
6067 is unsuccessful continue in the association code. */
6068 goto bit_rotate;
6069
6070 case BIT_AND_EXPR:
6071 bit_and:
6072 if (integer_all_onesp (arg1))
6073 return non_lvalue (convert (type, arg0));
6074 if (integer_zerop (arg1))
6075 return omit_one_operand (type, arg1, arg0);
6076 t1 = distribute_bit_expr (code, type, arg0, arg1);
6077 if (t1 != NULL_TREE)
6078 return t1;
6079 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
6080 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
6081 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6082 {
6083 unsigned int prec
6084 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
6085
6086 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
6087 && (~TREE_INT_CST_LOW (arg1)
6088 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
6089 return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
6090 }
6091
6092 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
6093
6094 This results in more efficient code for machines without a NOR
6095 instruction. Combine will canonicalize to the first form
6096 which will allow use of NOR instructions provided by the
6097 backend if they exist. */
6098 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6099 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6100 {
6101 return fold (build1 (BIT_NOT_EXPR, type,
6102 build (BIT_IOR_EXPR, type,
6103 TREE_OPERAND (arg0, 0),
6104 TREE_OPERAND (arg1, 0))));
6105 }
6106
6107 goto associate;
6108
6109 case BIT_ANDTC_EXPR:
6110 if (integer_all_onesp (arg0))
6111 return non_lvalue (convert (type, arg1));
6112 if (integer_zerop (arg0))
6113 return omit_one_operand (type, arg0, arg1);
6114 if (TREE_CODE (arg1) == INTEGER_CST)
6115 {
6116 arg1 = fold (build1 (BIT_NOT_EXPR, type, arg1));
6117 code = BIT_AND_EXPR;
6118 goto bit_and;
6119 }
6120 goto binary;
6121
6122 case RDIV_EXPR:
6123 /* Don't touch a floating-point divide by zero unless the mode
6124 of the constant can represent infinity. */
6125 if (TREE_CODE (arg1) == REAL_CST
6126 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
6127 && real_zerop (arg1))
6128 return t;
6129
6130 /* (-A) / (-B) -> A / B */
6131 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
6132 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
6133 TREE_OPERAND (arg1, 0)));
6134
6135 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
6136 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6137 && real_onep (arg1))
6138 return non_lvalue (convert (type, arg0));
6139
6140 /* If ARG1 is a constant, we can convert this to a multiply by the
6141 reciprocal. This does not have the same rounding properties,
6142 so only do this if -funsafe-math-optimizations. We can actually
6143 always safely do it if ARG1 is a power of two, but it's hard to
6144 tell if it is or not in a portable manner. */
6145 if (TREE_CODE (arg1) == REAL_CST)
6146 {
6147 if (flag_unsafe_math_optimizations
6148 && 0 != (tem = const_binop (code, build_real (type, dconst1),
6149 arg1, 0)))
6150 return fold (build (MULT_EXPR, type, arg0, tem));
6151 /* Find the reciprocal if optimizing and the result is exact. */
6152 else if (optimize)
6153 {
6154 REAL_VALUE_TYPE r;
6155 r = TREE_REAL_CST (arg1);
6156               if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
6157 {
6158 tem = build_real (type, r);
6159 return fold (build (MULT_EXPR, type, arg0, tem));
6160 }
6161 }
6162 }
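      /* For illustration: x / 4.0 becomes x * 0.25 here even without
	 -funsafe-math-optimizations, since the reciprocal of a power of
	 two is exact; x / 3.0 requires the unsafe-math path.  */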
6163 /* Convert A/B/C to A/(B*C). */
6164 if (flag_unsafe_math_optimizations
6165 && TREE_CODE (arg0) == RDIV_EXPR)
6166 {
6167 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
6168 build (MULT_EXPR, type, TREE_OPERAND (arg0, 1),
6169 arg1)));
6170 }
6171 /* Convert A/(B/C) to (A/B)*C. */
6172 if (flag_unsafe_math_optimizations
6173 && TREE_CODE (arg1) == RDIV_EXPR)
6174 {
6175 return fold (build (MULT_EXPR, type,
6176 build (RDIV_EXPR, type, arg0,
6177 TREE_OPERAND (arg1, 0)),
6178 TREE_OPERAND (arg1, 1)));
6179 }
6180
6181 if (flag_unsafe_math_optimizations)
6182 {
6183 enum built_in_function fcode = builtin_mathfn_code (arg1);
6184 /* Optimize x/exp(y) into x*exp(-y). */
6185 if (fcode == BUILT_IN_EXP
6186 || fcode == BUILT_IN_EXPF
6187 || fcode == BUILT_IN_EXPL)
6188 {
6189 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6190 tree arg = build1 (NEGATE_EXPR, type,
6191 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6192 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6193 arg1 = build_function_call_expr (expfn, arglist);
6194 return fold (build (MULT_EXPR, type, arg0, arg1));
6195 }
6196
6197 /* Optimize x/pow(y,z) into x*pow(y,-z). */
6198 if (fcode == BUILT_IN_POW
6199 || fcode == BUILT_IN_POWF
6200 || fcode == BUILT_IN_POWL)
6201 {
6202 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6203 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6204 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
6205 tree neg11 = fold (build1 (NEGATE_EXPR, type, arg11));
6206               tree arglist = tree_cons (NULL_TREE, arg10,
6207 build_tree_list (NULL_TREE, neg11));
6208 arg1 = build_function_call_expr (powfn, arglist);
6209 return fold (build (MULT_EXPR, type, arg0, arg1));
6210 }
6211 }
6212
6213 if (flag_unsafe_math_optimizations)
6214 {
6215 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6216 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6217
6218 /* Optimize sin(x)/cos(x) as tan(x). */
6219 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
6220 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
6221 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
6222 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6223 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6224 {
6225 tree tanfn;
6226
6227 if (fcode0 == BUILT_IN_SIN)
6228 tanfn = implicit_built_in_decls[BUILT_IN_TAN];
6229 else if (fcode0 == BUILT_IN_SINF)
6230 tanfn = implicit_built_in_decls[BUILT_IN_TANF];
6231 else if (fcode0 == BUILT_IN_SINL)
6232 tanfn = implicit_built_in_decls[BUILT_IN_TANL];
6233 else
6234 tanfn = NULL_TREE;
6235
6236 if (tanfn != NULL_TREE)
6237 return build_function_call_expr (tanfn,
6238 TREE_OPERAND (arg0, 1));
6239 }
6240
6241 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
6242 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
6243 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
6244 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
6245 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6246 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6247 {
6248 tree tanfn;
6249
6250 if (fcode0 == BUILT_IN_COS)
6251 tanfn = implicit_built_in_decls[BUILT_IN_TAN];
6252 else if (fcode0 == BUILT_IN_COSF)
6253 tanfn = implicit_built_in_decls[BUILT_IN_TANF];
6254 else if (fcode0 == BUILT_IN_COSL)
6255 tanfn = implicit_built_in_decls[BUILT_IN_TANL];
6256 else
6257 tanfn = NULL_TREE;
6258
6259 if (tanfn != NULL_TREE)
6260 {
6261 tree tmp = TREE_OPERAND (arg0, 1);
6262 tmp = build_function_call_expr (tanfn, tmp);
6263 return fold (build (RDIV_EXPR, type,
6264 build_real (type, dconst1),
6265 tmp));
6266 }
6267 }
6268 }
6269 goto binary;
6270
6271 case TRUNC_DIV_EXPR:
6272 case ROUND_DIV_EXPR:
6273 case FLOOR_DIV_EXPR:
6274 case CEIL_DIV_EXPR:
6275 case EXACT_DIV_EXPR:
6276 if (integer_onep (arg1))
6277 return non_lvalue (convert (type, arg0));
6278 if (integer_zerop (arg1))
6279 return t;
6280
6281 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
6282 operation, EXACT_DIV_EXPR.
6283
6284          Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
6285          At one time others generated faster code; it's not clear if they do
6286          after the last round of changes to the DIV code in expmed.c.  */
6287 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
6288 && multiple_of_p (type, arg0, arg1))
6289 return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));
6290
6291 if (TREE_CODE (arg1) == INTEGER_CST
6292 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6293 code, NULL_TREE)))
6294 return convert (type, tem);
6295
6296 goto binary;
6297
6298 case CEIL_MOD_EXPR:
6299 case FLOOR_MOD_EXPR:
6300 case ROUND_MOD_EXPR:
6301 case TRUNC_MOD_EXPR:
6302 if (integer_onep (arg1))
6303 return omit_one_operand (type, integer_zero_node, arg0);
6304 if (integer_zerop (arg1))
6305 return t;
6306
6307 if (TREE_CODE (arg1) == INTEGER_CST
6308 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6309 code, NULL_TREE)))
6310 return convert (type, tem);
6311
6312 goto binary;
6313
6314 case LROTATE_EXPR:
6315 case RROTATE_EXPR:
6316 if (integer_all_onesp (arg0))
6317 return omit_one_operand (type, arg0, arg1);
6318 goto shift;
6319
6320 case RSHIFT_EXPR:
6321 /* Optimize -1 >> x for arithmetic right shifts. */
6322 if (integer_all_onesp (arg0) && ! TREE_UNSIGNED (type))
6323 return omit_one_operand (type, arg0, arg1);
6324 /* ... fall through ... */
6325
6326 case LSHIFT_EXPR:
6327 shift:
6328 if (integer_zerop (arg1))
6329 return non_lvalue (convert (type, arg0));
6330 if (integer_zerop (arg0))
6331 return omit_one_operand (type, arg0, arg1);
6332
6333       /* Since a negative shift count is not well-defined, don't
6334          try to compute it in the compiler.  */
6335 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
6336 return t;
6337 /* Rewrite an LROTATE_EXPR by a constant into an
6338 RROTATE_EXPR by a new constant. */
6339 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
6340 {
6341 TREE_SET_CODE (t, RROTATE_EXPR);
6342 code = RROTATE_EXPR;
6343 TREE_OPERAND (t, 1) = arg1
6344 = const_binop
6345 (MINUS_EXPR,
6346 convert (TREE_TYPE (arg1),
6347 build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0)),
6348 arg1, 0);
6349 if (tree_int_cst_sgn (arg1) < 0)
6350 return t;
6351 }
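      /* For illustration (32-bit mode): a left-rotate of X by 5 is
	 canonicalized above to a right-rotate of X by 27.  */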
6352
6353 /* If we have a rotate of a bit operation with the rotate count and
6354 the second operand of the bit operation both constant,
6355 permute the two operations. */
6356 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6357 && (TREE_CODE (arg0) == BIT_AND_EXPR
6358 || TREE_CODE (arg0) == BIT_ANDTC_EXPR
6359 || TREE_CODE (arg0) == BIT_IOR_EXPR
6360 || TREE_CODE (arg0) == BIT_XOR_EXPR)
6361 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
6362 return fold (build (TREE_CODE (arg0), type,
6363 fold (build (code, type,
6364 TREE_OPERAND (arg0, 0), arg1)),
6365 fold (build (code, type,
6366 TREE_OPERAND (arg0, 1), arg1))));
6367
6368 /* Two consecutive rotates adding up to the width of the mode can
6369 be ignored. */
6370 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6371 && TREE_CODE (arg0) == RROTATE_EXPR
6372 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6373 && TREE_INT_CST_HIGH (arg1) == 0
6374 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
6375 && ((TREE_INT_CST_LOW (arg1)
6376 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
6377 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
6378 return TREE_OPERAND (arg0, 0);
6379
6380 goto binary;
6381
6382 case MIN_EXPR:
6383 if (operand_equal_p (arg0, arg1, 0))
6384 return omit_one_operand (type, arg0, arg1);
6385 if (INTEGRAL_TYPE_P (type)
6386 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
6387 return omit_one_operand (type, arg1, arg0);
6388 goto associate;
6389
6390 case MAX_EXPR:
6391 if (operand_equal_p (arg0, arg1, 0))
6392 return omit_one_operand (type, arg0, arg1);
6393 if (INTEGRAL_TYPE_P (type)
6394 && TYPE_MAX_VALUE (type)
6395 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
6396 return omit_one_operand (type, arg1, arg0);
6397 goto associate;
6398
6399 case TRUTH_NOT_EXPR:
6400 /* Note that the operand of this must be an int
6401 and its values must be 0 or 1.
6402 ("true" is a fixed value perhaps depending on the language,
6403 but we don't handle values other than 1 correctly yet.) */
6404 tem = invert_truthvalue (arg0);
6405 /* Avoid infinite recursion. */
6406 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
6407 {
6408 tem = fold_single_bit_test (code, arg0, arg1, type);
6409 if (tem)
6410 return tem;
6411 return t;
6412 }
6413 return convert (type, tem);
6414
6415 case TRUTH_ANDIF_EXPR:
6416 /* Note that the operands of this must be ints
6417 and their values must be 0 or 1.
6418 ("true" is a fixed value perhaps depending on the language.) */
6419 /* If first arg is constant zero, return it. */
6420 if (integer_zerop (arg0))
6421 return convert (type, arg0);
6422 case TRUTH_AND_EXPR:
6423 /* If either arg is constant true, drop it. */
6424 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6425 return non_lvalue (convert (type, arg1));
6426 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
6427 /* Preserve sequence points. */
6428 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
6429 return non_lvalue (convert (type, arg0));
6430 /* If second arg is constant zero, result is zero, but first arg
6431 must be evaluated. */
6432 if (integer_zerop (arg1))
6433 return omit_one_operand (type, arg1, arg0);
6434 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
6435 case will be handled here. */
6436 if (integer_zerop (arg0))
6437 return omit_one_operand (type, arg0, arg1);
6438
6439 truth_andor:
6440 /* We only do these simplifications if we are optimizing. */
6441 if (!optimize)
6442 return t;
6443
6444 /* Check for things like (A || B) && (A || C). We can convert this
6445 to A || (B && C). Note that either operator can be any of the four
6446 truth and/or operations and the transformation will still be
6447 valid. Also note that we only care about order for the
6448 ANDIF and ORIF operators. If B contains side effects, this
6449 might change the truth-value of A. */
6450 if (TREE_CODE (arg0) == TREE_CODE (arg1)
6451 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
6452 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
6453 || TREE_CODE (arg0) == TRUTH_AND_EXPR
6454 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
6455 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
6456 {
6457 tree a00 = TREE_OPERAND (arg0, 0);
6458 tree a01 = TREE_OPERAND (arg0, 1);
6459 tree a10 = TREE_OPERAND (arg1, 0);
6460 tree a11 = TREE_OPERAND (arg1, 1);
6461 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
6462 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
6463 && (code == TRUTH_AND_EXPR
6464 || code == TRUTH_OR_EXPR));
6465
6466 if (operand_equal_p (a00, a10, 0))
6467 return fold (build (TREE_CODE (arg0), type, a00,
6468 fold (build (code, type, a01, a11))));
6469 else if (commutative && operand_equal_p (a00, a11, 0))
6470 return fold (build (TREE_CODE (arg0), type, a00,
6471 fold (build (code, type, a01, a10))));
6472 else if (commutative && operand_equal_p (a01, a10, 0))
6473 return fold (build (TREE_CODE (arg0), type, a01,
6474 fold (build (code, type, a00, a11))));
6475
6476           /* This case is tricky because we must either have commutative
6477 operators or else A10 must not have side-effects. */
6478
6479 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
6480 && operand_equal_p (a01, a11, 0))
6481 return fold (build (TREE_CODE (arg0), type,
6482 fold (build (code, type, a00, a10)),
6483 a01));
6484 }
6485
6486 /* See if we can build a range comparison. */
6487 if (0 != (tem = fold_range_test (t)))
6488 return tem;
6489
6490 /* Check for the possibility of merging component references. If our
6491 lhs is another similar operation, try to merge its rhs with our
6492 rhs. Then try to merge our lhs and rhs. */
6493 if (TREE_CODE (arg0) == code
6494 && 0 != (tem = fold_truthop (code, type,
6495 TREE_OPERAND (arg0, 1), arg1)))
6496 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6497
6498 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
6499 return tem;
6500
6501 return t;
6502
6503 case TRUTH_ORIF_EXPR:
6504 /* Note that the operands of this must be ints
6505          and their values must be 0 or 1.
6506 ("true" is a fixed value perhaps depending on the language.) */
6507 /* If first arg is constant true, return it. */
6508 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6509 return convert (type, arg0);
6510 case TRUTH_OR_EXPR:
6511 /* If either arg is constant zero, drop it. */
6512 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
6513 return non_lvalue (convert (type, arg1));
6514 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
6515 /* Preserve sequence points. */
6516 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
6517 return non_lvalue (convert (type, arg0));
6518 /* If second arg is constant true, result is true, but we must
6519 evaluate first arg. */
6520 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
6521 return omit_one_operand (type, arg1, arg0);
6522 /* Likewise for first arg, but note this only occurs here for
6523 TRUTH_OR_EXPR. */
6524 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6525 return omit_one_operand (type, arg0, arg1);
6526 goto truth_andor;
6527
6528 case TRUTH_XOR_EXPR:
6529 /* If either arg is constant zero, drop it. */
6530 if (integer_zerop (arg0))
6531 return non_lvalue (convert (type, arg1));
6532 if (integer_zerop (arg1))
6533 return non_lvalue (convert (type, arg0));
6534 /* If either arg is constant true, this is a logical inversion. */
6535 if (integer_onep (arg0))
6536 return non_lvalue (convert (type, invert_truthvalue (arg1)));
6537 if (integer_onep (arg1))
6538 return non_lvalue (convert (type, invert_truthvalue (arg0)));
6539 return t;
6540
6541 case EQ_EXPR:
6542 case NE_EXPR:
6543 case LT_EXPR:
6544 case GT_EXPR:
6545 case LE_EXPR:
6546 case GE_EXPR:
6547 /* If one arg is a real or integer constant, put it last. */
6548 if ((TREE_CODE (arg0) == INTEGER_CST
6549 && TREE_CODE (arg1) != INTEGER_CST)
6550           || (TREE_CODE (arg0) == REAL_CST
6551               && TREE_CODE (arg1) != REAL_CST))
6552 {
6553 TREE_OPERAND (t, 0) = arg1;
6554 TREE_OPERAND (t, 1) = arg0;
6555 arg0 = TREE_OPERAND (t, 0);
6556 arg1 = TREE_OPERAND (t, 1);
6557 code = swap_tree_comparison (code);
6558 TREE_SET_CODE (t, code);
6559 }
6560
6561 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
6562 {
6563 tree targ0 = strip_float_extensions (arg0);
6564 tree targ1 = strip_float_extensions (arg1);
6565 tree newtype = TREE_TYPE (targ0);
6566
6567 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
6568 newtype = TREE_TYPE (targ1);
6569
6570 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
6571 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
6572 return fold (build (code, type, convert (newtype, targ0),
6573 convert (newtype, targ1)));
6574
6575 /* (-a) CMP (-b) -> b CMP a */
6576 if (TREE_CODE (arg0) == NEGATE_EXPR
6577 && TREE_CODE (arg1) == NEGATE_EXPR)
6578 return fold (build (code, type, TREE_OPERAND (arg1, 0),
6579 TREE_OPERAND (arg0, 0)));
6580
6581 if (TREE_CODE (arg1) == REAL_CST)
6582 {
6583 REAL_VALUE_TYPE cst;
6584 cst = TREE_REAL_CST (arg1);
6585
6586 /* (-a) CMP CST -> a swap(CMP) (-CST) */
6587 if (TREE_CODE (arg0) == NEGATE_EXPR)
6588 return
6589 fold (build (swap_tree_comparison (code), type,
6590 TREE_OPERAND (arg0, 0),
6591 build_real (TREE_TYPE (arg1),
6592 REAL_VALUE_NEGATE (cst))));
6593
6594 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
6595 /* a CMP (-0) -> a CMP 0 */
6596 if (REAL_VALUE_MINUS_ZERO (cst))
6597 return fold (build (code, type, arg0,
6598 build_real (TREE_TYPE (arg1), dconst0)));
6599
6600 /* x != NaN is always true, other ops are always false. */
6601 if (REAL_VALUE_ISNAN (cst)
6602 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
6603 {
6604 t = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
6605 return omit_one_operand (type, convert (type, t), arg0);
6606 }
6607
6608 /* Fold comparisons against infinity. */
6609 if (REAL_VALUE_ISINF (cst))
6610 {
6611 tem = fold_inf_compare (code, type, arg0, arg1);
6612 if (tem != NULL_TREE)
6613 return tem;
6614 }
6615 }
6616
6617           /* If this is a comparison of a real constant with a PLUS_EXPR
6618              or a MINUS_EXPR of a real constant, we can convert it into a
6619              comparison with a revised real constant, provided that
6620              unsafe math optimizations are enabled and no overflow occurs.  */
6621 if (flag_unsafe_math_optimizations
6622 && TREE_CODE (arg1) == REAL_CST
6623 && (TREE_CODE (arg0) == PLUS_EXPR
6624 || TREE_CODE (arg0) == MINUS_EXPR)
6625 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6626 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
6627 ? MINUS_EXPR : PLUS_EXPR,
6628 arg1, TREE_OPERAND (arg0, 1), 0))
6629 && ! TREE_CONSTANT_OVERFLOW (tem))
6630 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6631
6632 /* Likewise, we can simplify a comparison of a real constant with
6633 a MINUS_EXPR whose first operand is also a real constant, i.e.
6634 (c1 - x) < c2 becomes x > c1-c2. */
6635 if (flag_unsafe_math_optimizations
6636 && TREE_CODE (arg1) == REAL_CST
6637 && TREE_CODE (arg0) == MINUS_EXPR
6638 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
6639 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
6640 arg1, 0))
6641 && ! TREE_CONSTANT_OVERFLOW (tem))
6642 return fold (build (swap_tree_comparison (code), type,
6643 TREE_OPERAND (arg0, 1), tem));
6644
6645 /* Fold comparisons against built-in math functions. */
6646 if (TREE_CODE (arg1) == REAL_CST
6647 && flag_unsafe_math_optimizations
6648 && ! flag_errno_math)
6649 {
6650 enum built_in_function fcode = builtin_mathfn_code (arg0);
6651
6652 if (fcode != END_BUILTINS)
6653 {
6654 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
6655 if (tem != NULL_TREE)
6656 return tem;
6657 }
6658 }
6659 }
6660
6661 /* Convert foo++ == CONST into ++foo == CONST + INCR.
6662 First, see if one arg is constant; find the constant arg
6663 and the other one. */
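      /* For illustration: i++ == 5 is rewritten below as ++i == 6, which
	 is valid here for integer equality comparisons and for pointer
	 types generally.  */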
6664 {
6665 tree constop = 0, varop = NULL_TREE;
6666 int constopnum = -1;
6667
6668 if (TREE_CONSTANT (arg1))
6669 constopnum = 1, constop = arg1, varop = arg0;
6670 if (TREE_CONSTANT (arg0))
6671 constopnum = 0, constop = arg0, varop = arg1;
6672
6673 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
6674 {
6675 /* This optimization is invalid for ordered comparisons
6676 if CONST+INCR overflows or if foo+incr might overflow.
6677 This optimization is invalid for floating point due to rounding.
6678 For pointer types we assume overflow doesn't happen. */
6679 if (POINTER_TYPE_P (TREE_TYPE (varop))
6680 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
6681 && (code == EQ_EXPR || code == NE_EXPR)))
6682 {
6683 tree newconst
6684 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
6685 constop, TREE_OPERAND (varop, 1)));
6686
6687               /* Do not overwrite the current varop to be a preincrement;
6688 create a new node so that we won't confuse our caller who
6689 might create trees and throw them away, reusing the
6690 arguments that they passed to build. This shows up in
6691 the THEN or ELSE parts of ?: being postincrements. */
6692 varop = build (PREINCREMENT_EXPR, TREE_TYPE (varop),
6693 TREE_OPERAND (varop, 0),
6694 TREE_OPERAND (varop, 1));
6695
6696 /* If VAROP is a reference to a bitfield, we must mask
6697 the constant by the width of the field. */
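/* E.g., for a 3-bit field the mask below comes out as ~0
shifted right by precision - 3, i.e. the three low-order
bits, computed in an unsigned type so the shift is logical. */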
6698 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
6699 && DECL_BIT_FIELD (TREE_OPERAND
6700 (TREE_OPERAND (varop, 0), 1)))
6701 {
6702 int size
6703 = TREE_INT_CST_LOW (DECL_SIZE
6704 (TREE_OPERAND
6705 (TREE_OPERAND (varop, 0), 1)));
6706 tree mask, unsigned_type;
6707 unsigned int precision;
6708 tree folded_compare;
6709
6710 /* First check whether the comparison would come out
6711 always the same. If we don't do that we would
6712 change the meaning with the masking. */
6713 if (constopnum == 0)
6714 folded_compare = fold (build (code, type, constop,
6715 TREE_OPERAND (varop, 0)));
6716 else
6717 folded_compare = fold (build (code, type,
6718 TREE_OPERAND (varop, 0),
6719 constop));
6720 if (integer_zerop (folded_compare)
6721 || integer_onep (folded_compare))
6722 return omit_one_operand (type, folded_compare, varop);
6723
6724 unsigned_type = (*lang_hooks.types.type_for_size) (size, 1);
6725 precision = TYPE_PRECISION (unsigned_type);
6726 mask = build_int_2 (~0, ~0);
6727 TREE_TYPE (mask) = unsigned_type;
6728 force_fit_type (mask, 0);
6729 mask = const_binop (RSHIFT_EXPR, mask,
6730 size_int (precision - size), 0);
6731 newconst = fold (build (BIT_AND_EXPR,
6732 TREE_TYPE (varop), newconst,
6733 convert (TREE_TYPE (varop),
6734 mask)));
6735 }
6736
6737 t = build (code, type,
6738 (constopnum == 0) ? newconst : varop,
6739 (constopnum == 1) ? newconst : varop);
6740 return t;
6741 }
6742 }
6743 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
6744 {
6745 if (POINTER_TYPE_P (TREE_TYPE (varop))
6746 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
6747 && (code == EQ_EXPR || code == NE_EXPR)))
6748 {
6749 tree newconst
6750 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
6751 constop, TREE_OPERAND (varop, 1)));
6752
6753 /* Do not overwrite the current varop to be a predecrement,
6754 create a new node so that we won't confuse our caller who
6755 might create trees and throw them away, reusing the
6756 arguments that they passed to build. This shows up in
6757 the THEN or ELSE parts of ?: being postdecrements. */
6758 varop = build (PREDECREMENT_EXPR, TREE_TYPE (varop),
6759 TREE_OPERAND (varop, 0),
6760 TREE_OPERAND (varop, 1));
6761
6762 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
6763 && DECL_BIT_FIELD (TREE_OPERAND
6764 (TREE_OPERAND (varop, 0), 1)))
6765 {
6766 int size
6767 = TREE_INT_CST_LOW (DECL_SIZE
6768 (TREE_OPERAND
6769 (TREE_OPERAND (varop, 0), 1)));
6770 tree mask, unsigned_type;
6771 unsigned int precision;
6772 tree folded_compare;
6773
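/* As in the postincrement case above, first check whether the
comparison would come out always the same, lest the masking
change its meaning. */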
6774 if (constopnum == 0)
6775 folded_compare = fold (build (code, type, constop,
6776 TREE_OPERAND (varop, 0)));
6777 else
6778 folded_compare = fold (build (code, type,
6779 TREE_OPERAND (varop, 0),
6780 constop));
6781 if (integer_zerop (folded_compare)
6782 || integer_onep (folded_compare))
6783 return omit_one_operand (type, folded_compare, varop);
6784
6785 unsigned_type = (*lang_hooks.types.type_for_size) (size, 1);
6786 precision = TYPE_PRECISION (unsigned_type);
6787 mask = build_int_2 (~0, ~0);
6788 TREE_TYPE (mask) = unsigned_type;
6789 force_fit_type (mask, 0);
6790 mask = const_binop (RSHIFT_EXPR, mask,
6791 size_int (precision - size), 0);
6792 newconst = fold (build (BIT_AND_EXPR,
6793 TREE_TYPE (varop), newconst,
6794 convert (TREE_TYPE (varop),
6795 mask)));
6796 }
6797
6798 t = build (code, type,
6799 (constopnum == 0) ? newconst : varop,
6800 (constopnum == 1) ? newconst : varop);
6801 return t;
6802 }
6803 }
6804 }
6805
6806 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
6807 This transformation affects the cases which are handled in later
6808 optimizations involving comparisons with non-negative constants. */
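/* E.g., x >= 1 becomes x > 0, and x < 1 becomes x <= 0. */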
6809 if (TREE_CODE (arg1) == INTEGER_CST
6810 && TREE_CODE (arg0) != INTEGER_CST
6811 && tree_int_cst_sgn (arg1) > 0)
6812 {
6813 switch (code)
6814 {
6815 case GE_EXPR:
6816 code = GT_EXPR;
6817 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6818 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6819 break;
6820
6821 case LT_EXPR:
6822 code = LE_EXPR;
6823 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6824 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6825 break;
6826
6827 default:
6828 break;
6829 }
6830 }
6831
6832 /* Comparisons with the highest or lowest possible integer of
6833 the specified size will have known values. */
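/* For example, if X has type unsigned char (width 8), X > 255
folds to constant false and X <= 255 to constant true, while
X >= 255 becomes X == 255 and X < 255 becomes X != 255. */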
6834 {
6835 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
6836
6837 if (TREE_CODE (arg1) == INTEGER_CST
6838 && ! TREE_CONSTANT_OVERFLOW (arg1)
6839 && width <= HOST_BITS_PER_WIDE_INT
6840 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
6841 || POINTER_TYPE_P (TREE_TYPE (arg1))))
6842 {
6843 unsigned HOST_WIDE_INT signed_max;
6844 unsigned HOST_WIDE_INT max, min;
6845
6846 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
6847
6848 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
6849 {
6850 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
6851 min = 0;
6852 }
6853 else
6854 {
6855 max = signed_max;
6856 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
6857 }
6858
6859 if (TREE_INT_CST_HIGH (arg1) == 0
6860 && TREE_INT_CST_LOW (arg1) == max)
6861 switch (code)
6862 {
6863 case GT_EXPR:
6864 return omit_one_operand (type,
6865 convert (type, integer_zero_node),
6866 arg0);
6867 case GE_EXPR:
6868 code = EQ_EXPR;
6869 TREE_SET_CODE (t, EQ_EXPR);
6870 break;
6871 case LE_EXPR:
6872 return omit_one_operand (type,
6873 convert (type, integer_one_node),
6874 arg0);
6875 case LT_EXPR:
6876 code = NE_EXPR;
6877 TREE_SET_CODE (t, NE_EXPR);
6878 break;
6879
6880 /* The GE_EXPR and LT_EXPR cases above are not normally
6881 reached because of previous transformations. */
6882
6883 default:
6884 break;
6885 }
6886 else if (TREE_INT_CST_HIGH (arg1) == 0
6887 && TREE_INT_CST_LOW (arg1) == max - 1)
6888 switch (code)
6889 {
6890 case GT_EXPR:
6891 code = EQ_EXPR;
6892 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
6893 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6894 break;
6895 case LE_EXPR:
6896 code = NE_EXPR;
6897 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
6898 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6899 break;
6900 default:
6901 break;
6902 }
6903 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
6904 && TREE_INT_CST_LOW (arg1) == min)
6905 switch (code)
6906 {
6907 case LT_EXPR:
6908 return omit_one_operand (type,
6909 convert (type, integer_zero_node),
6910 arg0);
6911 case LE_EXPR:
6912 code = EQ_EXPR;
6913 TREE_SET_CODE (t, EQ_EXPR);
6914 break;
6915
6916 case GE_EXPR:
6917 return omit_one_operand (type,
6918 convert (type, integer_one_node),
6919 arg0);
6920 case GT_EXPR:
6921 code = NE_EXPR;
6922 TREE_SET_CODE (t, NE_EXPR);
6923 break;
6924
6925 default:
6926 break;
6927 }
6928 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
6929 && TREE_INT_CST_LOW (arg1) == min + 1)
6930 switch (code)
6931 {
6932 case GE_EXPR:
6933 code = NE_EXPR;
6934 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6935 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6936 break;
6937 case LT_EXPR:
6938 code = EQ_EXPR;
6939 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6940 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6941 break;
6942 default:
6943 break;
6944 }
6945
6946 else if (TREE_INT_CST_HIGH (arg1) == 0
6947 && TREE_INT_CST_LOW (arg1) == signed_max
6948 && TREE_UNSIGNED (TREE_TYPE (arg1))
6949 /* signed_type does not work on pointer types. */
6950 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
6951 {
6952 /* The following case also applies to X < signed_max+1
6953 and X >= signed_max+1 because of previous transformations. */
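/* E.g., with 32-bit ints, (unsigned int) x <= 2147483647
becomes (int) x >= 0, and the GT_EXPR form becomes
(int) x < 0. */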
6954 if (code == LE_EXPR || code == GT_EXPR)
6955 {
6956 tree st0, st1;
6957 st0 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg0));
6958 st1 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg1));
6959 return fold
6960 (build (code == LE_EXPR ? GE_EXPR: LT_EXPR,
6961 type, convert (st0, arg0),
6962 convert (st1, integer_zero_node)));
6963 }
6964 }
6965 }
6966 }
6967
6968 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
6969 a MINUS_EXPR of a constant, we can convert it into a comparison with
6970 a revised constant as long as no overflow occurs. */
6971 if ((code == EQ_EXPR || code == NE_EXPR)
6972 && TREE_CODE (arg1) == INTEGER_CST
6973 && (TREE_CODE (arg0) == PLUS_EXPR
6974 || TREE_CODE (arg0) == MINUS_EXPR)
6975 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6976 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
6977 ? MINUS_EXPR : PLUS_EXPR,
6978 arg1, TREE_OPERAND (arg0, 1), 0))
6979 && ! TREE_CONSTANT_OVERFLOW (tem))
6980 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6981
6982 /* Similarly for a NEGATE_EXPR. */
6983 else if ((code == EQ_EXPR || code == NE_EXPR)
6984 && TREE_CODE (arg0) == NEGATE_EXPR
6985 && TREE_CODE (arg1) == INTEGER_CST
6986 && 0 != (tem = negate_expr (arg1))
6987 && TREE_CODE (tem) == INTEGER_CST
6988 && ! TREE_CONSTANT_OVERFLOW (tem))
6989 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6990
6991 /* If we have X - Y == 0, we can convert that to X == Y and similarly
6992 for !=. Don't do this for ordered comparisons due to overflow. */
6993 else if ((code == NE_EXPR || code == EQ_EXPR)
6994 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
6995 return fold (build (code, type,
6996 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
6997
6998 /* If we are widening one operand of an integer comparison,
6999 see if the other operand is similarly being widened. Perhaps we
7000 can do the comparison in the narrower type. */
7001 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
7002 && TREE_CODE (arg0) == NOP_EXPR
7003 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
7004 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
7005 && (TREE_TYPE (t1) == TREE_TYPE (tem)
7006 || (TREE_CODE (t1) == INTEGER_CST
7007 && int_fits_type_p (t1, TREE_TYPE (tem)))))
7008 return fold (build (code, type, tem, convert (TREE_TYPE (tem), t1)));
7009
7010 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
7011 constant, we can simplify it. */
7012 else if (TREE_CODE (arg1) == INTEGER_CST
7013 && (TREE_CODE (arg0) == MIN_EXPR
7014 || TREE_CODE (arg0) == MAX_EXPR)
7015 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7016 return optimize_minmax_comparison (t);
7017
7018 /* If we are comparing an ABS_EXPR with a constant, we can
7019 convert all the cases into explicit comparisons, but they may
7020 well not be faster than doing the ABS and one comparison.
7021 But ABS (X) <= C is a range comparison, which becomes a subtraction
7022 and a comparison, and is probably faster. */
7023 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7024 && TREE_CODE (arg0) == ABS_EXPR
7025 && ! TREE_SIDE_EFFECTS (arg0)
7026 && (0 != (tem = negate_expr (arg1)))
7027 && TREE_CODE (tem) == INTEGER_CST
7028 && ! TREE_CONSTANT_OVERFLOW (tem))
7029 return fold (build (TRUTH_ANDIF_EXPR, type,
7030 build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
7031 build (LE_EXPR, type,
7032 TREE_OPERAND (arg0, 0), arg1)));
7033
7034 /* If this is an EQ or NE comparison with zero and ARG0 is
7035 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
7036 two operations, but the latter can be done in one less insn
7037 on machines that have only two-operand insns or on which a
7038 constant cannot be the first operand. */
7039 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
7040 && TREE_CODE (arg0) == BIT_AND_EXPR)
7041 {
7042 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
7043 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
7044 return
7045 fold (build (code, type,
7046 build (BIT_AND_EXPR, TREE_TYPE (arg0),
7047 build (RSHIFT_EXPR,
7048 TREE_TYPE (TREE_OPERAND (arg0, 0)),
7049 TREE_OPERAND (arg0, 1),
7050 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
7051 convert (TREE_TYPE (arg0),
7052 integer_one_node)),
7053 arg1));
7054 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
7055 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
7056 return
7057 fold (build (code, type,
7058 build (BIT_AND_EXPR, TREE_TYPE (arg0),
7059 build (RSHIFT_EXPR,
7060 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7061 TREE_OPERAND (arg0, 0),
7062 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
7063 convert (TREE_TYPE (arg0),
7064 integer_one_node)),
7065 arg1));
7066 }
7067
7068 /* If this is an NE or EQ comparison of zero against the result of a
7069 signed MOD operation whose second operand is a power of 2, make
7070 the MOD operation unsigned since it is simpler and equivalent. */
7071 if ((code == NE_EXPR || code == EQ_EXPR)
7072 && integer_zerop (arg1)
7073 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
7074 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
7075 || TREE_CODE (arg0) == CEIL_MOD_EXPR
7076 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
7077 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
7078 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7079 {
7080 tree newtype = (*lang_hooks.types.unsigned_type) (TREE_TYPE (arg0));
7081 tree newmod = build (TREE_CODE (arg0), newtype,
7082 convert (newtype, TREE_OPERAND (arg0, 0)),
7083 convert (newtype, TREE_OPERAND (arg0, 1)));
7084
7085 return build (code, type, newmod, convert (newtype, arg1));
7086 }
7087
7088 /* If this is an NE comparison of zero with an AND of one, remove the
7089 comparison since the AND will give the correct value. */
7090 if (code == NE_EXPR && integer_zerop (arg1)
7091 && TREE_CODE (arg0) == BIT_AND_EXPR
7092 && integer_onep (TREE_OPERAND (arg0, 1)))
7093 return convert (type, arg0);
7094
7095 /* If we have (A & C) == C where C is a power of 2, convert this into
7096 (A & C) != 0. Similarly for NE_EXPR. */
7097 if ((code == EQ_EXPR || code == NE_EXPR)
7098 && TREE_CODE (arg0) == BIT_AND_EXPR
7099 && integer_pow2p (TREE_OPERAND (arg0, 1))
7100 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
7101 return fold (build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
7102 arg0, integer_zero_node));
7103
7104 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
7105 2, then fold the expression into shifts and logical operations. */
7106 tem = fold_single_bit_test (code, arg0, arg1, type);
7107 if (tem)
7108 return tem;
7109
7110 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
7111 and similarly for >= into !=. */
7112 if ((code == LT_EXPR || code == GE_EXPR)
7113 && TREE_UNSIGNED (TREE_TYPE (arg0))
7114 && TREE_CODE (arg1) == LSHIFT_EXPR
7115 && integer_onep (TREE_OPERAND (arg1, 0)))
7116 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7117 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7118 TREE_OPERAND (arg1, 1)),
7119 convert (TREE_TYPE (arg0), integer_zero_node));
7120
7121 else if ((code == LT_EXPR || code == GE_EXPR)
7122 && TREE_UNSIGNED (TREE_TYPE (arg0))
7123 && (TREE_CODE (arg1) == NOP_EXPR
7124 || TREE_CODE (arg1) == CONVERT_EXPR)
7125 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
7126 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
7127 return
7128 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7129 convert (TREE_TYPE (arg0),
7130 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7131 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
7132 convert (TREE_TYPE (arg0), integer_zero_node));
7133
7134 /* Simplify comparison of something with itself. (For IEEE
7135 floating-point, we can only do some of these simplifications.) */
7136 if (operand_equal_p (arg0, arg1, 0))
7137 {
7138 switch (code)
7139 {
7140 case EQ_EXPR:
7141 case GE_EXPR:
7142 case LE_EXPR:
7143 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7144 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7145 return constant_boolean_node (1, type);
7146 code = EQ_EXPR;
7147 TREE_SET_CODE (t, code);
7148 break;
7149
7150 case NE_EXPR:
7151 /* For NE, we can only do this simplification if the type is
7152 integral or we don't honor IEEE floating point NaNs. */
7153 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
7154 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7155 break;
7156 /* ... fall through ... */
7157 case GT_EXPR:
7158 case LT_EXPR:
7159 return constant_boolean_node (0, type);
7160 default:
7161 abort ();
7162 }
7163 }
7164
7165 /* If we are comparing an expression that just has comparisons
7166 of two integer values, arithmetic expressions of those comparisons,
7167 and constants, we can simplify it. There are only three cases
7168 to check: the two values can either be equal, the first can be
7169 greater, or the second can be greater. Fold the expression for
7170 those three values. Since each value must be 0 or 1, we have
7171 eight possibilities, each of which corresponds to the constant 0
7172 or 1 or one of the six possible comparisons.
7173
7174 This handles common cases like (a > b) == 0 but also handles
7175 expressions like ((x > y) - (y > x)) > 0, which supposedly
7176 occur in macroized code. */
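/* As a worked example, folding (a > b) == 0 evaluates the
comparison for the three orderings: a > b gives 1 == 0 (false),
while a == b and a < b give 0 == 0 (true), so the mask below is
011, selecting LE_EXPR, i.e. a <= b. */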
7177
7178 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
7179 {
7180 tree cval1 = 0, cval2 = 0;
7181 int save_p = 0;
7182
7183 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
7184 /* Don't handle degenerate cases here; they should already
7185 have been handled anyway. */
7186 && cval1 != 0 && cval2 != 0
7187 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
7188 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
7189 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
7190 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
7191 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
7192 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
7193 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
7194 {
7195 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
7196 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
7197
7198 /* We can't just pass T to eval_subst in case cval1 or cval2
7199 was the same as ARG1. */
7200
7201 tree high_result
7202 = fold (build (code, type,
7203 eval_subst (arg0, cval1, maxval, cval2, minval),
7204 arg1));
7205 tree equal_result
7206 = fold (build (code, type,
7207 eval_subst (arg0, cval1, maxval, cval2, maxval),
7208 arg1));
7209 tree low_result
7210 = fold (build (code, type,
7211 eval_subst (arg0, cval1, minval, cval2, maxval),
7212 arg1));
7213
7214 /* All three of these results should be 0 or 1. Confirm they
7215 are. Then use those values to select the proper code
7216 to use. */
7217
7218 if ((integer_zerop (high_result)
7219 || integer_onep (high_result))
7220 && (integer_zerop (equal_result)
7221 || integer_onep (equal_result))
7222 && (integer_zerop (low_result)
7223 || integer_onep (low_result)))
7224 {
7225 /* Make a 3-bit mask with the high-order bit being the
7226 value for `>', the next for `=', and the low for `<'. */
7227 switch ((integer_onep (high_result) * 4)
7228 + (integer_onep (equal_result) * 2)
7229 + integer_onep (low_result))
7230 {
7231 case 0:
7232 /* Always false. */
7233 return omit_one_operand (type, integer_zero_node, arg0);
7234 case 1:
7235 code = LT_EXPR;
7236 break;
7237 case 2:
7238 code = EQ_EXPR;
7239 break;
7240 case 3:
7241 code = LE_EXPR;
7242 break;
7243 case 4:
7244 code = GT_EXPR;
7245 break;
7246 case 5:
7247 code = NE_EXPR;
7248 break;
7249 case 6:
7250 code = GE_EXPR;
7251 break;
7252 case 7:
7253 /* Always true. */
7254 return omit_one_operand (type, integer_one_node, arg0);
7255 }
7256
7257 t = build (code, type, cval1, cval2);
7258 if (save_p)
7259 return save_expr (t);
7260 else
7261 return fold (t);
7262 }
7263 }
7264 }
7265
7266 /* If this is a comparison of a field, we may be able to simplify it. */
7267 if (((TREE_CODE (arg0) == COMPONENT_REF
7268 && (*lang_hooks.can_use_bit_fields_p) ())
7269 || TREE_CODE (arg0) == BIT_FIELD_REF)
7270 && (code == EQ_EXPR || code == NE_EXPR)
7271 /* Handle the constant case even without -O
7272 to make sure the warnings are given. */
7273 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
7274 {
7275 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
7276 return t1 ? t1 : t;
7277 }
7278
7279 /* If this is a comparison of complex values and either or both sides
7280 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
7281 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
7282 This may prevent needless evaluations. */
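/* E.g., with a, b, c and d real, COMPLEX_EXPR (a, b) ==
COMPLEX_EXPR (c, d) splits into a == c && b == d, and the !=
form into a != c || b != d. */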
7283 if ((code == EQ_EXPR || code == NE_EXPR)
7284 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
7285 && (TREE_CODE (arg0) == COMPLEX_EXPR
7286 || TREE_CODE (arg1) == COMPLEX_EXPR
7287 || TREE_CODE (arg0) == COMPLEX_CST
7288 || TREE_CODE (arg1) == COMPLEX_CST))
7289 {
7290 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
7291 tree real0, imag0, real1, imag1;
7292
7293 arg0 = save_expr (arg0);
7294 arg1 = save_expr (arg1);
7295 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
7296 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
7297 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
7298 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
7299
7300 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
7301 : TRUTH_ORIF_EXPR),
7302 type,
7303 fold (build (code, type, real0, real1)),
7304 fold (build (code, type, imag0, imag1))));
7305 }
7306
7307 /* Optimize comparisons of strlen vs zero to a compare of the
7308 first character of the string vs zero. To wit,
7309 strlen(ptr) == 0 => *ptr == 0
7310 strlen(ptr) != 0 => *ptr != 0
7311 Other cases should reduce to one of these two (or a constant)
7312 due to the return value of strlen being unsigned. */
7313 if ((code == EQ_EXPR || code == NE_EXPR)
7314 && integer_zerop (arg1)
7315 && TREE_CODE (arg0) == CALL_EXPR
7316 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR)
7317 {
7318 tree fndecl = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7319 tree arglist;
7320
7321 if (TREE_CODE (fndecl) == FUNCTION_DECL
7322 && DECL_BUILT_IN (fndecl)
7323 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
7324 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
7325 && (arglist = TREE_OPERAND (arg0, 1))
7326 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
7327 && ! TREE_CHAIN (arglist))
7328 return fold (build (code, type,
7329 build1 (INDIRECT_REF, char_type_node,
7330 TREE_VALUE (arglist)),
7331 integer_zero_node));
7332 }
7333
7334 /* From here on, the only cases we handle are when the result is
7335 known to be a constant.
7336
7337 To compute GT, swap the arguments and do LT.
7338 To compute GE, do LT and invert the result.
7339 To compute LE, swap the arguments, do LT and invert the result.
7340 To compute NE, do EQ and invert the result.
7341
7342 Therefore, the code below must handle only EQ and LT. */
7343
7344 if (code == LE_EXPR || code == GT_EXPR)
7345 {
7346 tem = arg0, arg0 = arg1, arg1 = tem;
7347 code = swap_tree_comparison (code);
7348 }
7349
7350 /* Note that it is safe to invert for real values here because we
7351 will check below in the one case that it matters. */
7352
7353 t1 = NULL_TREE;
7354 invert = 0;
7355 if (code == NE_EXPR || code == GE_EXPR)
7356 {
7357 invert = 1;
7358 code = invert_tree_comparison (code);
7359 }
7360
7361 /* Compute a result for LT or EQ if args permit;
7362 otherwise return T. */
7363 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
7364 {
7365 if (code == EQ_EXPR)
7366 t1 = build_int_2 (tree_int_cst_equal (arg0, arg1), 0);
7367 else
7368 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
7369 ? INT_CST_LT_UNSIGNED (arg0, arg1)
7370 : INT_CST_LT (arg0, arg1)),
7371 0);
7372 }
7373
7374 #if 0 /* This is no longer useful, but breaks some real code. */
7375 /* Assume a nonexplicit constant cannot equal an explicit one,
7376 since such code would be undefined anyway.
7377 Exception: on sysvr4, using #pragma weak,
7378 a label can come out as 0. */
7379 else if (TREE_CODE (arg1) == INTEGER_CST
7380 && !integer_zerop (arg1)
7381 && TREE_CONSTANT (arg0)
7382 && TREE_CODE (arg0) == ADDR_EXPR
7383 && code == EQ_EXPR)
7384 t1 = build_int_2 (0, 0);
7385 #endif
7386 /* Two real constants can be compared explicitly. */
7387 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
7388 {
7389 /* If either operand is a NaN, the result is false with two
7390 exceptions: First, an NE_EXPR is true on NaNs, but that case
7391 is already handled correctly since we will be inverting the
7392 result for NE_EXPR. Second, if we had inverted a LE_EXPR
7393 or a GE_EXPR into a LT_EXPR, we must return true so that it
7394 will be inverted into false. */
7395
7396 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
7397 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
7398 t1 = build_int_2 (invert && code == LT_EXPR, 0);
7399
7400 else if (code == EQ_EXPR)
7401 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
7402 TREE_REAL_CST (arg1)),
7403 0);
7404 else
7405 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
7406 TREE_REAL_CST (arg1)),
7407 0);
7408 }
7409
7410 if (t1 == NULL_TREE)
7411 return t;
7412
7413 if (invert)
7414 TREE_INT_CST_LOW (t1) ^= 1;
7415
7416 TREE_TYPE (t1) = type;
7417 if (TREE_CODE (type) == BOOLEAN_TYPE)
7418 return (*lang_hooks.truthvalue_conversion) (t1);
7419 return t1;
7420
7421 case COND_EXPR:
7422 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
7423 so all simple results must be passed through pedantic_non_lvalue. */
7424 if (TREE_CODE (arg0) == INTEGER_CST)
7425 return pedantic_non_lvalue
7426 (TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
7427 else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
7428 return pedantic_omit_one_operand (type, arg1, arg0);
7429
7430 /* If the second operand is zero, invert the comparison and swap
7431 the second and third operands. Likewise if the second operand
7432 is constant and the third is not or if the third operand is
7433 equivalent to the first operand of the comparison. */
7434
7435 if (integer_zerop (arg1)
7436 || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
7437 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
7438 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
7439 TREE_OPERAND (t, 2),
7440 TREE_OPERAND (arg0, 1))))
7441 {
7442 /* See if this can be inverted. If it can't, possibly because
7443 it was a floating-point inequality comparison, don't do
7444 anything. */
7445 tem = invert_truthvalue (arg0);
7446
7447 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
7448 {
7449 t = build (code, type, tem,
7450 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
7451 arg0 = tem;
7452 /* arg1 should be the first argument of the new T. */
7453 arg1 = TREE_OPERAND (t, 1);
7454 STRIP_NOPS (arg1);
7455 }
7456 }
7457
7458 /* If we have A op B ? A : C, we may be able to convert this to a
7459 simpler expression, depending on the operation and the values
7460 of B and C. Signed zeros prevent all of these transformations,
7461 for reasons given above each one. */
7462
7463 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
7464 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
7465 arg1, TREE_OPERAND (arg0, 1))
7466 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
7467 {
7468 tree arg2 = TREE_OPERAND (t, 2);
7469 enum tree_code comp_code = TREE_CODE (arg0);
7470
7471 STRIP_NOPS (arg2);
7472
7473 /* If we have A op 0 ? A : -A, consider applying the following
7474 transformations:
7475
7476 A == 0? A : -A same as -A
7477 A != 0? A : -A same as A
7478 A >= 0? A : -A same as abs (A)
7479 A > 0? A : -A same as abs (A)
7480 A <= 0? A : -A same as -abs (A)
7481 A < 0? A : -A same as -abs (A)
7482
7483 None of these transformations work for modes with signed
7484 zeros. If A is +/-0, the first two transformations will
7485 change the sign of the result (from +0 to -0, or vice
7486 versa). The last four will fix the sign of the result,
7487 even though the original expressions could be positive or
7488 negative, depending on the sign of A.
7489
7490 Note that all these transformations are correct if A is
7491 NaN, since the two alternatives (A and -A) are also NaNs. */
7492 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
7493 ? real_zerop (TREE_OPERAND (arg0, 1))
7494 : integer_zerop (TREE_OPERAND (arg0, 1)))
7495 && TREE_CODE (arg2) == NEGATE_EXPR
7496 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
7497 switch (comp_code)
7498 {
7499 case EQ_EXPR:
7500 return
7501 pedantic_non_lvalue
7502 (convert (type,
7503 negate_expr
7504 (convert (TREE_TYPE (TREE_OPERAND (t, 1)),
7505 arg1))));
7506 case NE_EXPR:
7507 return pedantic_non_lvalue (convert (type, arg1));
7508 case GE_EXPR:
7509 case GT_EXPR:
7510 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7511 arg1 = convert ((*lang_hooks.types.signed_type)
7512 (TREE_TYPE (arg1)), arg1);
7513 return pedantic_non_lvalue
7514 (convert (type, fold (build1 (ABS_EXPR,
7515 TREE_TYPE (arg1), arg1))));
7516 case LE_EXPR:
7517 case LT_EXPR:
7518 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7519 arg1 = convert ((*lang_hooks.types.signed_type)
7520 (TREE_TYPE (arg1)), arg1);
7521 return pedantic_non_lvalue
7522 (negate_expr (convert (type,
7523 fold (build1 (ABS_EXPR,
7524 TREE_TYPE (arg1),
7525 arg1)))));
7526 default:
7527 abort ();
7528 }
7529
7530 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
7531 A == 0 ? A : 0 is always 0 unless A is -0. Note that
7532 both transformations are correct when A is NaN: A != 0
7533 is then true, and A == 0 is false. */
7534
7535 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
7536 {
7537 if (comp_code == NE_EXPR)
7538 return pedantic_non_lvalue (convert (type, arg1));
7539 else if (comp_code == EQ_EXPR)
7540 return pedantic_non_lvalue (convert (type, integer_zero_node));
7541 }
7542
7543 /* Try some transformations of A op B ? A : B.
7544
7545 A == B? A : B same as B
7546 A != B? A : B same as A
7547 A >= B? A : B same as max (A, B)
7548 A > B? A : B same as max (B, A)
7549 A <= B? A : B same as min (A, B)
7550 A < B? A : B same as min (B, A)
7551
7552 As above, these transformations don't work in the presence
7553 of signed zeros. For example, if A and B are zeros of
7554 opposite sign, the first two transformations will change
7555 the sign of the result. In the last four, the original
7556 expressions give different results for (A=+0, B=-0) and
7557 (A=-0, B=+0), but the transformed expressions do not.
7558
7559 The first two transformations are correct if either A or B
7560 is a NaN. In the first transformation, the condition will
7561 be false, and B will indeed be chosen. In the case of the
7562 second transformation, the condition A != B will be true,
7563 and A will be chosen.
7564
7565 The conversions to max() and min() are not correct if B is
7566 a number and A is not. The conditions in the original
7567 expressions will be false, so all four give B. The min()
7568 and max() versions would give a NaN instead. */
7569 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
7570 arg2, TREE_OPERAND (arg0, 0)))
7571 {
7572 tree comp_op0 = TREE_OPERAND (arg0, 0);
7573 tree comp_op1 = TREE_OPERAND (arg0, 1);
7574 tree comp_type = TREE_TYPE (comp_op0);
7575
7576 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
7577 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
7578 {
7579 comp_type = type;
7580 comp_op0 = arg1;
7581 comp_op1 = arg2;
7582 }
7583
7584 switch (comp_code)
7585 {
7586 case EQ_EXPR:
7587 return pedantic_non_lvalue (convert (type, arg2));
7588 case NE_EXPR:
7589 return pedantic_non_lvalue (convert (type, arg1));
7590 case LE_EXPR:
7591 case LT_EXPR:
7592 /* In C++ a ?: expression can be an lvalue, so put the
7593 operand which will be used if they are equal first
7594 so that we can convert this back to the
7595 corresponding COND_EXPR. */
7596 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
7597 return pedantic_non_lvalue
7598 (convert (type, fold (build (MIN_EXPR, comp_type,
7599 (comp_code == LE_EXPR
7600 ? comp_op0 : comp_op1),
7601 (comp_code == LE_EXPR
7602 ? comp_op1 : comp_op0)))));
7603 break;
7604 case GE_EXPR:
7605 case GT_EXPR:
7606 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
7607 return pedantic_non_lvalue
7608 (convert (type, fold (build (MAX_EXPR, comp_type,
7609 (comp_code == GE_EXPR
7610 ? comp_op0 : comp_op1),
7611 (comp_code == GE_EXPR
7612 ? comp_op1 : comp_op0)))));
7613 break;
7614 default:
7615 abort ();
7616 }
7617 }
7618
7619 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
7620 we might still be able to simplify this. For example,
7621 if C1 is one less or one more than C2, this might have started
7622 out as a MIN or MAX and been transformed by this function.
7623 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
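/* For instance, a < 3 ? a : 2 is min (a, 2), C1 here being
C2 + 1; the GT_EXPR case below recovers max () analogously. */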
7624
7625 if (INTEGRAL_TYPE_P (type)
7626 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7627 && TREE_CODE (arg2) == INTEGER_CST)
7628 switch (comp_code)
7629 {
7630 case EQ_EXPR:
7631 /* We can replace A with C1 in this case. */
7632 arg1 = convert (type, TREE_OPERAND (arg0, 1));
7633 t = build (code, type, TREE_OPERAND (t, 0), arg1,
7634 TREE_OPERAND (t, 2));
7635 break;
7636
7637 case LT_EXPR:
7638 /* If C1 is C2 + 1, this is min(A, C2). */
7639 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
7640 && operand_equal_p (TREE_OPERAND (arg0, 1),
7641 const_binop (PLUS_EXPR, arg2,
7642 integer_one_node, 0), 1))
7643 return pedantic_non_lvalue
7644 (fold (build (MIN_EXPR, type, arg1, arg2)));
7645 break;
7646
7647 case LE_EXPR:
7648 /* If C1 is C2 - 1, this is min(A, C2). */
7649 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
7650 && operand_equal_p (TREE_OPERAND (arg0, 1),
7651 const_binop (MINUS_EXPR, arg2,
7652 integer_one_node, 0), 1))
7653 return pedantic_non_lvalue
7654 (fold (build (MIN_EXPR, type, arg1, arg2)));
7655 break;
7656
7657 case GT_EXPR:
7658 /* If C1 is C2 - 1, this is max(A, C2). */
7659 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
7660 && operand_equal_p (TREE_OPERAND (arg0, 1),
7661 const_binop (MINUS_EXPR, arg2,
7662 integer_one_node, 0), 1))
7663 return pedantic_non_lvalue
7664 (fold (build (MAX_EXPR, type, arg1, arg2)));
7665 break;
7666
7667 case GE_EXPR:
7668 /* If C1 is C2 + 1, this is max(A, C2). */
7669 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
7670 && operand_equal_p (TREE_OPERAND (arg0, 1),
7671 const_binop (PLUS_EXPR, arg2,
7672 integer_one_node, 0), 1))
7673 return pedantic_non_lvalue
7674 (fold (build (MAX_EXPR, type, arg1, arg2)));
7675 break;
7676 case NE_EXPR:
7677 break;
7678 default:
7679 abort ();
7680 }
7681 }
7682
7683 /* If the second operand is simpler than the third, swap them
7684 since that produces better jump optimization results. */
7685 if ((TREE_CONSTANT (arg1) || DECL_P (arg1)
7686 || TREE_CODE (arg1) == SAVE_EXPR)
7687 && ! (TREE_CONSTANT (TREE_OPERAND (t, 2))
7688 || DECL_P (TREE_OPERAND (t, 2))
7689 || TREE_CODE (TREE_OPERAND (t, 2)) == SAVE_EXPR))
7690 {
7691 /* See if this can be inverted. If it can't, possibly because
7692 it was a floating-point inequality comparison, don't do
7693 anything. */
7694 tem = invert_truthvalue (arg0);
7695
7696 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
7697 {
7698 t = build (code, type, tem,
7699 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
7700 arg0 = tem;
7701 /* arg1 should be the first argument of the new T. */
7702 arg1 = TREE_OPERAND (t, 1);
7703 STRIP_NOPS (arg1);
7704 }
7705 }
7706
7707 /* Convert A ? 1 : 0 to simply A. */
7708 if (integer_onep (TREE_OPERAND (t, 1))
7709 && integer_zerop (TREE_OPERAND (t, 2))
7710 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
7711 call to fold will try to move the conversion inside
7712 a COND, which will recurse. In that case, the COND_EXPR
7713 is probably the best choice, so leave it alone. */
7714 && type == TREE_TYPE (arg0))
7715 return pedantic_non_lvalue (arg0);
7716
7717 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
7718 over COND_EXPR in cases such as floating point comparisons. */
7719 if (integer_zerop (TREE_OPERAND (t, 1))
7720 && integer_onep (TREE_OPERAND (t, 2))
7721 && truth_value_p (TREE_CODE (arg0)))
7722 return pedantic_non_lvalue (convert (type,
7723 invert_truthvalue (arg0)));
7724
7725 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
7726 operation is simply A & 2. */
7727
7728 if (integer_zerop (TREE_OPERAND (t, 2))
7729 && TREE_CODE (arg0) == NE_EXPR
7730 && integer_zerop (TREE_OPERAND (arg0, 1))
7731 && integer_pow2p (arg1)
7732 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
7733 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
7734 arg1, 1))
7735 return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));
7736
7737 /* Convert A ? B : 0 into A && B if A and B are truth values. */
7738 if (integer_zerop (TREE_OPERAND (t, 2))
7739 && truth_value_p (TREE_CODE (arg0))
7740 && truth_value_p (TREE_CODE (arg1)))
7741 return pedantic_non_lvalue (fold (build (TRUTH_ANDIF_EXPR, type,
7742 arg0, arg1)));
7743
7744 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
7745 if (integer_onep (TREE_OPERAND (t, 2))
7746 && truth_value_p (TREE_CODE (arg0))
7747 && truth_value_p (TREE_CODE (arg1)))
7748 {
7749 /* Only perform transformation if ARG0 is easily inverted. */
7750 tem = invert_truthvalue (arg0);
7751 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
7752 return pedantic_non_lvalue (fold (build (TRUTH_ORIF_EXPR, type,
7753 tem, arg1)));
7754 }
7755
7756 return t;
7757
7758 case COMPOUND_EXPR:
7759 /* When pedantic, a compound expression can be neither an lvalue
7760 nor an integer constant expression. */
7761 if (TREE_SIDE_EFFECTS (arg0) || pedantic)
7762 return t;
7763 /* Don't let (0, 0) be a null pointer constant. */
7764 if (integer_zerop (arg1))
7765 return build1 (NOP_EXPR, type, arg1);
7766 return convert (type, arg1);
7767
7768 case COMPLEX_EXPR:
7769 if (wins)
7770 return build_complex (type, arg0, arg1);
7771 return t;
7772
7773 case REALPART_EXPR:
7774 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7775 return t;
7776 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7777 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7778 TREE_OPERAND (arg0, 1));
7779 else if (TREE_CODE (arg0) == COMPLEX_CST)
7780 return TREE_REALPART (arg0);
7781 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7782 return fold (build (TREE_CODE (arg0), type,
7783 fold (build1 (REALPART_EXPR, type,
7784 TREE_OPERAND (arg0, 0))),
7785 fold (build1 (REALPART_EXPR,
7786 type, TREE_OPERAND (arg0, 1)))));
7787 return t;
7788
7789 case IMAGPART_EXPR:
7790 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7791 return convert (type, integer_zero_node);
7792 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7793 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7794 TREE_OPERAND (arg0, 0));
7795 else if (TREE_CODE (arg0) == COMPLEX_CST)
7796 return TREE_IMAGPART (arg0);
7797 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7798 return fold (build (TREE_CODE (arg0), type,
7799 fold (build1 (IMAGPART_EXPR, type,
7800 TREE_OPERAND (arg0, 0))),
7801 fold (build1 (IMAGPART_EXPR, type,
7802 TREE_OPERAND (arg0, 1)))));
7803 return t;
7804
7805 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
7806 appropriate. */
7807 case CLEANUP_POINT_EXPR:
7808 if (! has_cleanups (arg0))
7809 return TREE_OPERAND (t, 0);
7810
7811 {
7812 enum tree_code code0 = TREE_CODE (arg0);
7813 int kind0 = TREE_CODE_CLASS (code0);
7814 tree arg00 = TREE_OPERAND (arg0, 0);
7815 tree arg01;
7816
7817 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
7818 return fold (build1 (code0, type,
7819 fold (build1 (CLEANUP_POINT_EXPR,
7820 TREE_TYPE (arg00), arg00))));
7821
7822 if (kind0 == '<' || kind0 == '2'
7823 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
7824 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
7825 || code0 == TRUTH_XOR_EXPR)
7826 {
7827 arg01 = TREE_OPERAND (arg0, 1);
7828
7829 if (TREE_CONSTANT (arg00)
7830 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
7831 && ! has_cleanups (arg00)))
7832 return fold (build (code0, type, arg00,
7833 fold (build1 (CLEANUP_POINT_EXPR,
7834 TREE_TYPE (arg01), arg01))));
7835
7836 if (TREE_CONSTANT (arg01))
7837 return fold (build (code0, type,
7838 fold (build1 (CLEANUP_POINT_EXPR,
7839 TREE_TYPE (arg00), arg00)),
7840 arg01));
7841 }
7842
7843 return t;
7844 }
7845
7846 case CALL_EXPR:
7847 /* Check for a built-in function. */
7848 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
7849 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (expr, 0), 0))
7850 == FUNCTION_DECL)
7851 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
7852 {
7853 tree tmp = fold_builtin (expr);
7854 if (tmp)
7855 return tmp;
7856 }
7857 return t;
7858
7859 default:
7860 return t;
7861 } /* switch (code) */
7862 }
7863
7864 /* Determine if the first argument is a multiple of the second argument.
7865 Return 0 if it is not, or if we cannot easily determine that it is.
7866
7867 An example of the sort of thing we care about (at this point; this routine
7868 could surely be made more general, and expanded to do what the *_DIV_EXPR's
7869 fold cases do now) is discovering that
7870
7871 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
7872
7873 is a multiple of
7874
7875 SAVE_EXPR (J * 8)
7876
7877 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
7878
7879 This code also handles discovering that
7880
7881 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
7882
7883 is a multiple of 8 so we don't have to worry about dealing with a
7884 possible remainder.
7885
7886 Note that we *look* inside a SAVE_EXPR only to determine how it was
7887 calculated; it is not safe for fold to do much of anything else with the
7888 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
7889 at run time. For example, the latter example above *cannot* be implemented
7890 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
7891 evaluation time of the original SAVE_EXPR is not necessarily the same at
7892 the time the new expression is evaluated. The only optimization of this
7893 sort that would be valid is changing
7894
7895 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
7896
7897 divided by 8 to
7898
7899 SAVE_EXPR (I) * SAVE_EXPR (J)
7900
7901 (where the same SAVE_EXPR (J) is used in the original and the
7902 transformed version). */
7903
7904 static int
7905 multiple_of_p (tree type, tree top, tree bottom)
7906 {
7907 if (operand_equal_p (top, bottom, 0))
7908 return 1;
7909
7910 if (TREE_CODE (type) != INTEGER_TYPE)
7911 return 0;
7912
7913 switch (TREE_CODE (top))
7914 {
7915 case MULT_EXPR:
7916 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
7917 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
7918
7919 case PLUS_EXPR:
7920 case MINUS_EXPR:
7921 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
7922 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
7923
7924 case LSHIFT_EXPR:
7925 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
7926 {
7927 tree op1, t1;
7928
7929 op1 = TREE_OPERAND (top, 1);
7930 /* const_binop may not detect overflow correctly,
7931 so check for it explicitly here. */
7932 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
7933 > TREE_INT_CST_LOW (op1)
7934 && TREE_INT_CST_HIGH (op1) == 0
7935 && 0 != (t1 = convert (type,
7936 const_binop (LSHIFT_EXPR, size_one_node,
7937 op1, 0)))
7938 && ! TREE_OVERFLOW (t1))
7939 return multiple_of_p (type, t1, bottom);
7940 }
7941 return 0;
7942
7943 case NOP_EXPR:
7944 /* Can't handle conversions from non-integral or wider integral type. */
7945 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
7946 || (TYPE_PRECISION (type)
7947 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
7948 return 0;
7949
7950 /* ... fall through ... */
7951
7952 case SAVE_EXPR:
7953 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
7954
7955 case INTEGER_CST:
7956 if (TREE_CODE (bottom) != INTEGER_CST
7957 || (TREE_UNSIGNED (type)
7958 && (tree_int_cst_sgn (top) < 0
7959 || tree_int_cst_sgn (bottom) < 0)))
7960 return 0;
7961 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
7962 top, bottom, 0));
7963
7964 default:
7965 return 0;
7966 }
7967 }
7968
7969 /* Return true if `t' is known to be non-negative. */
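/* A return of 0 only means that the sign could not be determined;
it does not mean `t' is known to be negative. */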
7970
7971 int
7972 tree_expr_nonnegative_p (tree t)
7973 {
7974 switch (TREE_CODE (t))
7975 {
7976 case ABS_EXPR:
7977 case FFS_EXPR:
7978 case POPCOUNT_EXPR:
7979 case PARITY_EXPR:
7980 return 1;
7981
7982 case CLZ_EXPR:
7983 case CTZ_EXPR:
7984 /* These are undefined at zero. This is true even if
7985 C[LT]Z_DEFINED_VALUE_AT_ZERO is set, since what we're
7986 computing here is a user-visible property. */
7987 return 0;
7988
7989 case INTEGER_CST:
7990 return tree_int_cst_sgn (t) >= 0;
7991
7992 case REAL_CST:
7993 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
7994
7995 case PLUS_EXPR:
7996 if (FLOAT_TYPE_P (TREE_TYPE (t)))
7997 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
7998 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7999
8000 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
8001 both unsigned and at least 2 bits shorter than the result. */
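/* E.g., two zero-extended 8-bit values summed in a 32-bit int
need at most MAX (8, 8) + 1 = 9 bits, so the sum cannot appear
negative. */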
8002 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8003 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8004 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8005 {
8006 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8007 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8008 if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
8009 && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
8010 {
8011 unsigned int prec = MAX (TYPE_PRECISION (inner1),
8012 TYPE_PRECISION (inner2)) + 1;
8013 return prec < TYPE_PRECISION (TREE_TYPE (t));
8014 }
8015 }
8016 break;
8017
8018 case MULT_EXPR:
8019 if (FLOAT_TYPE_P (TREE_TYPE (t)))
8020 {
8021 /* x * x for floating point x is always non-negative. */
8022 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
8023 return 1;
8024 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8025 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8026 }
8027
8028 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
8029 both unsigned and the sum of their precisions is less than the result's. */
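/* E.g., an 8-bit by 8-bit unsigned product widened to a 32-bit
int needs at most 8 + 8 = 16 bits, so it cannot appear
negative. */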
8030 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8031 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8032 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8033 {
8034 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8035 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8036 if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
8037 && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
8038 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
8039 < TYPE_PRECISION (TREE_TYPE (t));
8040 }
8041 return 0;
8042
8043 case TRUNC_DIV_EXPR:
8044 case CEIL_DIV_EXPR:
8045 case FLOOR_DIV_EXPR:
8046 case ROUND_DIV_EXPR:
8047 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8048 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8049
8050 case TRUNC_MOD_EXPR:
8051 case CEIL_MOD_EXPR:
8052 case FLOOR_MOD_EXPR:
8053 case ROUND_MOD_EXPR:
8054 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8055
8056 case RDIV_EXPR:
8057 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8058 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8059
8060 case NOP_EXPR:
8061 {
8062 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
8063 tree outer_type = TREE_TYPE (t);
8064
8065 if (TREE_CODE (outer_type) == REAL_TYPE)
8066 {
8067 if (TREE_CODE (inner_type) == REAL_TYPE)
8068 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8069 if (TREE_CODE (inner_type) == INTEGER_TYPE)
8070 {
8071 if (TREE_UNSIGNED (inner_type))
8072 return 1;
8073 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8074 }
8075 }
8076 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
8077 {
8078 if (TREE_CODE (inner_type) == REAL_TYPE)
8079 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8080 if (TREE_CODE (inner_type) == INTEGER_TYPE)
8081 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
8082 && TREE_UNSIGNED (inner_type);
8083 }
8084 }
8085 break;
8086
8087 case COND_EXPR:
8088 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
8089 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
8090 case COMPOUND_EXPR:
8091 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8092 case MIN_EXPR:
8093 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8094 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8095 case MAX_EXPR:
8096 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8097 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8098 case MODIFY_EXPR:
8099 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8100 case BIND_EXPR:
8101 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8102 case SAVE_EXPR:
8103 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8104 case NON_LVALUE_EXPR:
8105 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8106 case FLOAT_EXPR:
8107 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8108 case RTL_EXPR:
8109 return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));
8110
8111 case CALL_EXPR:
8112 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
8113 {
8114 tree fndecl = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
8115 tree arglist = TREE_OPERAND (t, 1);
8116 if (TREE_CODE (fndecl) == FUNCTION_DECL
8117 && DECL_BUILT_IN (fndecl)
8118 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
8119 switch (DECL_FUNCTION_CODE (fndecl))
8120 {
8121 case BUILT_IN_CABS:
8122 case BUILT_IN_CABSL:
8123 case BUILT_IN_CABSF:
8124 case BUILT_IN_EXP:
8125 case BUILT_IN_EXPF:
8126 case BUILT_IN_EXPL:
8127 case BUILT_IN_FABS:
8128 case BUILT_IN_FABSF:
8129 case BUILT_IN_FABSL:
8130 case BUILT_IN_SQRT:
8131 case BUILT_IN_SQRTF:
8132 case BUILT_IN_SQRTL:
8133 return 1;
8134
8135 case BUILT_IN_ATAN:
8136 case BUILT_IN_ATANF:
8137 case BUILT_IN_ATANL:
8138 case BUILT_IN_CEIL:
8139 case BUILT_IN_CEILF:
8140 case BUILT_IN_CEILL:
8141 case BUILT_IN_FLOOR:
8142 case BUILT_IN_FLOORF:
8143 case BUILT_IN_FLOORL:
8144 case BUILT_IN_NEARBYINT:
8145 case BUILT_IN_NEARBYINTF:
8146 case BUILT_IN_NEARBYINTL:
8147 case BUILT_IN_ROUND:
8148 case BUILT_IN_ROUNDF:
8149 case BUILT_IN_ROUNDL:
8150 case BUILT_IN_TRUNC:
8151 case BUILT_IN_TRUNCF:
8152 case BUILT_IN_TRUNCL:
8153 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
8154
8155 case BUILT_IN_POW:
8156 case BUILT_IN_POWF:
8157 case BUILT_IN_POWL:
8158 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
8159
8160 default:
8161 break;
8162 }
8163 }
8164
8165 /* ... fall through ... */
8166
8167 default:
8168 if (truth_value_p (TREE_CODE (t)))
8169 /* Truth values evaluate to 0 or 1, which is nonnegative. */
8170 return 1;
8171 }
8172
8173 /* We don't know the sign of `t', so be conservative and return false. */
8174 return 0;
8175 }
8176
8177 /* Return true if `r' is known to be non-negative.
8178 Only handles constants at the moment. */
8179
8180 int
8181 rtl_expr_nonnegative_p (rtx r)
8182 {
8183 switch (GET_CODE (r))
8184 {
8185 case CONST_INT:
8186 return INTVAL (r) >= 0;
8187
8188 case CONST_DOUBLE:
8189 if (GET_MODE (r) == VOIDmode)
8190 return CONST_DOUBLE_HIGH (r) >= 0;
8191 return 0;
8192
8193 case CONST_VECTOR:
8194 {
8195 int units, i;
8196 rtx elt;
8197
8198 units = CONST_VECTOR_NUNITS (r);
8199
8200 for (i = 0; i < units; ++i)
8201 {
8202 elt = CONST_VECTOR_ELT (r, i);
8203 if (!rtl_expr_nonnegative_p (elt))
8204 return 0;
8205 }
8206
8207 return 1;
8208 }
8209
8210 case SYMBOL_REF:
8211 case LABEL_REF:
8212 /* These are always nonnegative. */
8213 return 1;
8214
8215 default:
8216 return 0;
8217 }
8218 }
8219
8220 #include "gt-fold-const.h"