fold-const.c (extract_muldiv, [...]): Detect case when conversion overflows.
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision etc. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
29
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type.
32
33 fold takes a tree as argument and returns a simplified tree.
34
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
38
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
41
42 force_fit_type takes a constant and prior overflow indicator, and
43 forces the value to fit the type. It returns an overflow indicator. */
44
45 #include "config.h"
46 #include "system.h"
47 #include "coretypes.h"
48 #include "tm.h"
49 #include "flags.h"
50 #include "tree.h"
51 #include "real.h"
52 #include "rtl.h"
53 #include "expr.h"
54 #include "tm_p.h"
55 #include "toplev.h"
56 #include "ggc.h"
57 #include "hashtab.h"
58 #include "langhooks.h"
59
60 static void encode PARAMS ((HOST_WIDE_INT *,
61 unsigned HOST_WIDE_INT,
62 HOST_WIDE_INT));
63 static void decode PARAMS ((HOST_WIDE_INT *,
64 unsigned HOST_WIDE_INT *,
65 HOST_WIDE_INT *));
66 static bool negate_expr_p PARAMS ((tree));
67 static tree negate_expr PARAMS ((tree));
68 static tree split_tree PARAMS ((tree, enum tree_code, tree *, tree *,
69 tree *, int));
70 static tree associate_trees PARAMS ((tree, tree, enum tree_code, tree));
71 static tree int_const_binop PARAMS ((enum tree_code, tree, tree, int));
72 static tree const_binop PARAMS ((enum tree_code, tree, tree, int));
73 static hashval_t size_htab_hash PARAMS ((const void *));
74 static int size_htab_eq PARAMS ((const void *, const void *));
75 static tree fold_convert PARAMS ((tree, tree));
76 static enum tree_code invert_tree_comparison PARAMS ((enum tree_code));
77 static enum tree_code swap_tree_comparison PARAMS ((enum tree_code));
78 static int comparison_to_compcode PARAMS ((enum tree_code));
79 static enum tree_code compcode_to_comparison PARAMS ((int));
80 static int truth_value_p PARAMS ((enum tree_code));
81 static int operand_equal_for_comparison_p PARAMS ((tree, tree, tree));
82 static int twoval_comparison_p PARAMS ((tree, tree *, tree *, int *));
83 static tree eval_subst PARAMS ((tree, tree, tree, tree, tree));
84 static tree pedantic_omit_one_operand PARAMS ((tree, tree, tree));
85 static tree distribute_bit_expr PARAMS ((enum tree_code, tree, tree, tree));
86 static tree make_bit_field_ref PARAMS ((tree, tree, int, int, int));
87 static tree optimize_bit_field_compare PARAMS ((enum tree_code, tree,
88 tree, tree));
89 static tree decode_field_reference PARAMS ((tree, HOST_WIDE_INT *,
90 HOST_WIDE_INT *,
91 enum machine_mode *, int *,
92 int *, tree *, tree *));
93 static int all_ones_mask_p PARAMS ((tree, int));
94 static tree sign_bit_p PARAMS ((tree, tree));
95 static int simple_operand_p PARAMS ((tree));
96 static tree range_binop PARAMS ((enum tree_code, tree, tree, int,
97 tree, int));
98 static tree make_range PARAMS ((tree, int *, tree *, tree *));
99 static tree build_range_check PARAMS ((tree, tree, int, tree, tree));
100 static int merge_ranges PARAMS ((int *, tree *, tree *, int, tree, tree,
101 int, tree, tree));
102 static tree fold_range_test PARAMS ((tree));
103 static tree unextend PARAMS ((tree, int, int, tree));
104 static tree fold_truthop PARAMS ((enum tree_code, tree, tree, tree));
105 static tree optimize_minmax_comparison PARAMS ((tree));
106 static tree extract_muldiv PARAMS ((tree, tree, enum tree_code, tree));
107 static tree extract_muldiv_1 PARAMS ((tree, tree, enum tree_code, tree));
108 static tree strip_compound_expr PARAMS ((tree, tree));
109 static int multiple_of_p PARAMS ((tree, tree, tree));
110 static tree constant_boolean_node PARAMS ((int, tree));
111 static int count_cond PARAMS ((tree, int));
112 static tree fold_binary_op_with_conditional_arg
113 PARAMS ((enum tree_code, tree, tree, tree, int));
114 static bool fold_real_zero_addition_p PARAMS ((tree, tree, int));
115 static tree fold_mathfn_compare PARAMS ((enum built_in_function,
116 enum tree_code, tree, tree, tree));
117 static tree fold_inf_compare PARAMS ((enum tree_code, tree, tree, tree));
118
119 /* The following constants represent a bit-based encoding of GCC's
120 comparison operators. This encoding simplifies transformations
121 on relational comparison operators, such as AND and OR. */
122 #define COMPCODE_FALSE 0
123 #define COMPCODE_LT 1
124 #define COMPCODE_EQ 2
125 #define COMPCODE_LE 3
126 #define COMPCODE_GT 4
127 #define COMPCODE_NE 5
128 #define COMPCODE_GE 6
129 #define COMPCODE_TRUE 7
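/* Each of these codes is a three-bit mask: bit 0 stands for "less than",
   bit 1 for "equal" and bit 2 for "greater than".  Compound codes are
   unions of the primitive ones, e.g. COMPCODE_LE == COMPCODE_LT | COMPCODE_EQ
   and COMPCODE_NE == COMPCODE_LT | COMPCODE_GT, so combining two comparisons
   of the same operands reduces to bitwise AND and OR on their codes.  */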
130
131 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
132 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
133 and SUM1. Then this yields nonzero if overflow occurred during the
134 addition.
135
136 Overflow occurs if A and B have the same sign, but A and SUM differ in
137 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
138 sign. */
139 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
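/* For illustration with 8-bit quantities: 0x70 + 0x70 wraps to 0xE0;
   ~(0x70 ^ 0x70) and 0x70 ^ 0xE0 both have the sign bit set, so the test
   reports overflow.  Operands of opposite sign can never overflow, and
   for those ~((a) ^ (b)) has a clear sign bit, so the test reports none.  */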
140 \f
141 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
142 We do that by representing the two-word integer in 4 words, with only
143 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
144 number. The value of the word is LOWPART + HIGHPART * BASE. */
145
146 #define LOWPART(x) \
147 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
148 #define HIGHPART(x) \
149 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
150 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
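/* For illustration, assuming HOST_BITS_PER_WIDE_INT == 32: BASE is 0x10000,
   LOWPART (0x12345678) is 0x5678 and HIGHPART (0x12345678) is 0x1234, and
   indeed 0x5678 + 0x1234 * 0x10000 == 0x12345678.  */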
151
152 /* Unpack a two-word integer into 4 words.
153 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
154 WORDS points to the array of HOST_WIDE_INTs. */
155
156 static void
157 encode (words, low, hi)
158 HOST_WIDE_INT *words;
159 unsigned HOST_WIDE_INT low;
160 HOST_WIDE_INT hi;
161 {
162 words[0] = LOWPART (low);
163 words[1] = HIGHPART (low);
164 words[2] = LOWPART (hi);
165 words[3] = HIGHPART (hi);
166 }
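/* E.g., with HOST_BITS_PER_WIDE_INT == 32, LOW == 0xDEADBEEF and
   HI == 0x12345678 encode as words {0xBEEF, 0xDEAD, 0x5678, 0x1234};
   decode below is the exact inverse.  */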
167
168 /* Pack an array of 4 words into a two-word integer.
169 WORDS points to the array of words.
170 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
171
172 static void
173 decode (words, low, hi)
174 HOST_WIDE_INT *words;
175 unsigned HOST_WIDE_INT *low;
176 HOST_WIDE_INT *hi;
177 {
178 *low = words[0] + words[1] * BASE;
179 *hi = words[2] + words[3] * BASE;
180 }
181 \f
182 /* Make the integer constant T valid for its type by setting to 0 or 1 all
183 the bits in the constant that don't belong in the type.
184
185 Return 1 if a signed overflow occurs, 0 otherwise. If OVERFLOW is
186 nonzero, a signed overflow has already occurred in calculating T, so
187 propagate it. */
188
189 int
190 force_fit_type (t, overflow)
191 tree t;
192 int overflow;
193 {
194 unsigned HOST_WIDE_INT low;
195 HOST_WIDE_INT high;
196 unsigned int prec;
197
198 if (TREE_CODE (t) == REAL_CST)
199 {
200 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
201 Consider doing it via real_convert now. */
202 return overflow;
203 }
204
205 else if (TREE_CODE (t) != INTEGER_CST)
206 return overflow;
207
208 low = TREE_INT_CST_LOW (t);
209 high = TREE_INT_CST_HIGH (t);
210
211 if (POINTER_TYPE_P (TREE_TYPE (t)))
212 prec = POINTER_SIZE;
213 else
214 prec = TYPE_PRECISION (TREE_TYPE (t));
215
216 /* First clear all bits that are beyond the type's precision. */
217
218 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
219 ;
220 else if (prec > HOST_BITS_PER_WIDE_INT)
221 TREE_INT_CST_HIGH (t)
222 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
223 else
224 {
225 TREE_INT_CST_HIGH (t) = 0;
226 if (prec < HOST_BITS_PER_WIDE_INT)
227 TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
228 }
229
230 /* Unsigned types do not suffer sign extension or overflow unless they
231 are a sizetype. */
232 if (TREE_UNSIGNED (TREE_TYPE (t))
233 && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
234 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
235 return overflow;
236
237 /* If the value's sign bit is set, extend the sign. */
238 if (prec != 2 * HOST_BITS_PER_WIDE_INT
239 && (prec > HOST_BITS_PER_WIDE_INT
240 ? 0 != (TREE_INT_CST_HIGH (t)
241 & ((HOST_WIDE_INT) 1
242 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
243 : 0 != (TREE_INT_CST_LOW (t)
244 & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
245 {
246 /* Value is negative:
247 set to 1 all the bits that are outside this type's precision. */
248 if (prec > HOST_BITS_PER_WIDE_INT)
249 TREE_INT_CST_HIGH (t)
250 |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
251 else
252 {
253 TREE_INT_CST_HIGH (t) = -1;
254 if (prec < HOST_BITS_PER_WIDE_INT)
255 TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
256 }
257 }
258
259 /* Return nonzero if signed overflow occurred. */
260 return
261 ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
262 != 0);
263 }
264 \f
265 /* Add two doubleword integers with doubleword result.
266 Each argument is given as two `HOST_WIDE_INT' pieces.
267 One argument is L1 and H1; the other, L2 and H2.
268 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
269
270 int
271 add_double (l1, h1, l2, h2, lv, hv)
272 unsigned HOST_WIDE_INT l1, l2;
273 HOST_WIDE_INT h1, h2;
274 unsigned HOST_WIDE_INT *lv;
275 HOST_WIDE_INT *hv;
276 {
277 unsigned HOST_WIDE_INT l;
278 HOST_WIDE_INT h;
279
280 l = l1 + l2;
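  /* The low-word sum wrapped around iff L < L1; that wraparound is
     exactly the carry into the high word.  */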
281 h = h1 + h2 + (l < l1);
282
283 *lv = l;
284 *hv = h;
285 return OVERFLOW_SUM_SIGN (h1, h2, h);
286 }
287
288 /* Negate a doubleword integer with doubleword result.
289 Return nonzero if the operation overflows, assuming it's signed.
290 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
291 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
292
293 int
294 neg_double (l1, h1, lv, hv)
295 unsigned HOST_WIDE_INT l1;
296 HOST_WIDE_INT h1;
297 unsigned HOST_WIDE_INT *lv;
298 HOST_WIDE_INT *hv;
299 {
300 if (l1 == 0)
301 {
302 *lv = 0;
303 *hv = - h1;
304 return (*hv & h1) < 0;
305 }
306 else
307 {
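      /* -X == ~X + 1 in two's complement.  Since L1 != 0 here, ~L1 + 1
	 cannot carry into the high word, so the high word is simply ~H1;
	 this case can never overflow.  */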
308 *lv = -l1;
309 *hv = ~h1;
310 return 0;
311 }
312 }
313 \f
314 /* Multiply two doubleword integers with doubleword result.
315 Return nonzero if the operation overflows, assuming it's signed.
316 Each argument is given as two `HOST_WIDE_INT' pieces.
317 One argument is L1 and H1; the other, L2 and H2.
318 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
319
320 int
321 mul_double (l1, h1, l2, h2, lv, hv)
322 unsigned HOST_WIDE_INT l1, l2;
323 HOST_WIDE_INT h1, h2;
324 unsigned HOST_WIDE_INT *lv;
325 HOST_WIDE_INT *hv;
326 {
327 HOST_WIDE_INT arg1[4];
328 HOST_WIDE_INT arg2[4];
329 HOST_WIDE_INT prod[4 * 2];
330 unsigned HOST_WIDE_INT carry;
331 int i, j, k;
332 unsigned HOST_WIDE_INT toplow, neglow;
333 HOST_WIDE_INT tophigh, neghigh;
334
335 encode (arg1, l1, h1);
336 encode (arg2, l2, h2);
337
338 memset ((char *) prod, 0, sizeof prod);
339
340 for (i = 0; i < 4; i++)
341 {
342 carry = 0;
343 for (j = 0; j < 4; j++)
344 {
345 k = i + j;
346 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
347 carry += arg1[i] * arg2[j];
348 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
349 carry += prod[k];
350 prod[k] = LOWPART (carry);
351 carry = HIGHPART (carry);
352 }
353 prod[i + 4] = carry;
354 }
355
356 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
357
358 /* Check for overflow by calculating the top half of the answer in full;
359 it should agree with the low half's sign bit. */
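  /* The digit loop above formed the full unsigned product.  If an operand
     was negative, its unsigned reading exceeds its signed value by
     2**(2*HOST_BITS_PER_WIDE_INT), which makes the top half too large by
     exactly the other operand; subtract it back out below.  */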
360 decode (prod + 4, &toplow, &tophigh);
361 if (h1 < 0)
362 {
363 neg_double (l2, h2, &neglow, &neghigh);
364 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
365 }
366 if (h2 < 0)
367 {
368 neg_double (l1, h1, &neglow, &neghigh);
369 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
370 }
371 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
372 }
373 \f
374 /* Shift the doubleword integer in L1, H1 left by COUNT places
375 keeping only PREC bits of result.
376 Shift right if COUNT is negative.
377 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
378 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
379
380 void
381 lshift_double (l1, h1, count, prec, lv, hv, arith)
382 unsigned HOST_WIDE_INT l1;
383 HOST_WIDE_INT h1, count;
384 unsigned int prec;
385 unsigned HOST_WIDE_INT *lv;
386 HOST_WIDE_INT *hv;
387 int arith;
388 {
389 unsigned HOST_WIDE_INT signmask;
390
391 if (count < 0)
392 {
393 rshift_double (l1, h1, -count, prec, lv, hv, arith);
394 return;
395 }
396
397 #ifdef SHIFT_COUNT_TRUNCATED
398 if (SHIFT_COUNT_TRUNCATED)
399 count %= prec;
400 #endif
401
402 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
403 {
404 /* Shifting by the host word size is undefined according to the
405 ANSI standard, so we must handle this as a special case. */
406 *hv = 0;
407 *lv = 0;
408 }
409 else if (count >= HOST_BITS_PER_WIDE_INT)
410 {
411 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
412 *lv = 0;
413 }
414 else
415 {
416 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
417 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
418 *lv = l1 << count;
419 }
420
421 /* Sign extend all bits that are beyond the precision. */
422
423 signmask = -((prec > HOST_BITS_PER_WIDE_INT
424 ? ((unsigned HOST_WIDE_INT) *hv
425 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
426 : (*lv >> (prec - 1))) & 1);
427
428 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
429 ;
430 else if (prec >= HOST_BITS_PER_WIDE_INT)
431 {
432 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
433 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
434 }
435 else
436 {
437 *hv = signmask;
438 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
439 *lv |= signmask << prec;
440 }
441 }
442
443 /* Shift the doubleword integer in L1, H1 right by COUNT places
444 keeping only PREC bits of result. COUNT must be positive.
445 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
446 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
447
448 void
449 rshift_double (l1, h1, count, prec, lv, hv, arith)
450 unsigned HOST_WIDE_INT l1;
451 HOST_WIDE_INT h1, count;
452 unsigned int prec;
453 unsigned HOST_WIDE_INT *lv;
454 HOST_WIDE_INT *hv;
455 int arith;
456 {
457 unsigned HOST_WIDE_INT signmask;
458
459 signmask = (arith
460 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
461 : 0);
462
463 #ifdef SHIFT_COUNT_TRUNCATED
464 if (SHIFT_COUNT_TRUNCATED)
465 count %= prec;
466 #endif
467
468 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
469 {
470 /* Shifting by the host word size is undefined according to the
471 ANSI standard, so we must handle this as a special case. */
472 *hv = 0;
473 *lv = 0;
474 }
475 else if (count >= HOST_BITS_PER_WIDE_INT)
476 {
477 *hv = 0;
478 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
479 }
480 else
481 {
482 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
483 *lv = ((l1 >> count)
484 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
485 }
486
487 /* Zero / sign extend all bits that are beyond the precision. */
488
489 if (count >= (HOST_WIDE_INT)prec)
490 {
491 *hv = signmask;
492 *lv = signmask;
493 }
494 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
495 ;
496 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
497 {
498 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
499 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
500 }
501 else
502 {
503 *hv = signmask;
504 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
505 *lv |= signmask << (prec - count);
506 }
507 }
508 \f
509 /* Rotate the doubleword integer in L1, H1 left by COUNT places
510 keeping only PREC bits of result.
511 Rotate right if COUNT is negative.
512 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
513
514 void
515 lrotate_double (l1, h1, count, prec, lv, hv)
516 unsigned HOST_WIDE_INT l1;
517 HOST_WIDE_INT h1, count;
518 unsigned int prec;
519 unsigned HOST_WIDE_INT *lv;
520 HOST_WIDE_INT *hv;
521 {
522 unsigned HOST_WIDE_INT s1l, s2l;
523 HOST_WIDE_INT s1h, s2h;
524
525 count %= prec;
526 if (count < 0)
527 count += prec;
528
529 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
530 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
531 *lv = s1l | s2l;
532 *hv = s1h | s2h;
533 }
534
535 /* Rotate the doubleword integer in L1, H1 right by COUNT places
536 keeping only PREC bits of result.  Rotate left if COUNT is negative.
537 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
538
539 void
540 rrotate_double (l1, h1, count, prec, lv, hv)
541 unsigned HOST_WIDE_INT l1;
542 HOST_WIDE_INT h1, count;
543 unsigned int prec;
544 unsigned HOST_WIDE_INT *lv;
545 HOST_WIDE_INT *hv;
546 {
547 unsigned HOST_WIDE_INT s1l, s2l;
548 HOST_WIDE_INT s1h, s2h;
549
550 count %= prec;
551 if (count < 0)
552 count += prec;
553
554 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
555 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
556 *lv = s1l | s2l;
557 *hv = s1h | s2h;
558 }
559 \f
560 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
561 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
562 CODE is a tree code for a kind of division, one of
563 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
564 or EXACT_DIV_EXPR
565 It controls how the quotient is rounded to an integer.
566 Return nonzero if the operation overflows.
567 UNS nonzero says do unsigned division. */
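/* For example: 7 / 2 gives 3 under TRUNC_DIV_EXPR and FLOOR_DIV_EXPR but 4
   under CEIL_DIV_EXPR and ROUND_DIV_EXPR, while -7 / 2 gives -3 under
   TRUNC_DIV_EXPR and CEIL_DIV_EXPR but -4 under FLOOR_DIV_EXPR and
   ROUND_DIV_EXPR; ROUND_DIV_EXPR rounds ties away from zero.  */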
568
569 int
570 div_and_round_double (code, uns,
571 lnum_orig, hnum_orig, lden_orig, hden_orig,
572 lquo, hquo, lrem, hrem)
573 enum tree_code code;
574 int uns;
575 unsigned HOST_WIDE_INT lnum_orig; /* num == numerator == dividend */
576 HOST_WIDE_INT hnum_orig;
577 unsigned HOST_WIDE_INT lden_orig; /* den == denominator == divisor */
578 HOST_WIDE_INT hden_orig;
579 unsigned HOST_WIDE_INT *lquo, *lrem;
580 HOST_WIDE_INT *hquo, *hrem;
581 {
582 int quo_neg = 0;
583 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
584 HOST_WIDE_INT den[4], quo[4];
585 int i, j;
586 unsigned HOST_WIDE_INT work;
587 unsigned HOST_WIDE_INT carry = 0;
588 unsigned HOST_WIDE_INT lnum = lnum_orig;
589 HOST_WIDE_INT hnum = hnum_orig;
590 unsigned HOST_WIDE_INT lden = lden_orig;
591 HOST_WIDE_INT hden = hden_orig;
592 int overflow = 0;
593
594 if (hden == 0 && lden == 0)
595 overflow = 1, lden = 1;
596
597 /* calculate quotient sign and convert operands to unsigned. */
598 if (!uns)
599 {
600 if (hnum < 0)
601 {
602 quo_neg = ~ quo_neg;
603 /* (minimum integer) / (-1) is the only overflow case. */
604 if (neg_double (lnum, hnum, &lnum, &hnum)
605 && ((HOST_WIDE_INT) lden & hden) == -1)
606 overflow = 1;
607 }
608 if (hden < 0)
609 {
610 quo_neg = ~ quo_neg;
611 neg_double (lden, hden, &lden, &hden);
612 }
613 }
614
615 if (hnum == 0 && hden == 0)
616 { /* single precision */
617 *hquo = *hrem = 0;
618 /* This unsigned division rounds toward zero. */
619 *lquo = lnum / lden;
620 goto finish_up;
621 }
622
623 if (hnum == 0)
624 { /* trivial case: dividend < divisor */
625 /* hden != 0 already checked. */
626 *hquo = *lquo = 0;
627 *hrem = hnum;
628 *lrem = lnum;
629 goto finish_up;
630 }
631
632 memset ((char *) quo, 0, sizeof quo);
633
634 memset ((char *) num, 0, sizeof num); /* to zero 9th element */
635 memset ((char *) den, 0, sizeof den);
636
637 encode (num, lnum, hnum);
638 encode (den, lden, hden);
639
640 /* Special code for when the divisor < BASE. */
641 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
642 {
643 /* hnum != 0 already checked. */
644 for (i = 4 - 1; i >= 0; i--)
645 {
646 work = num[i] + carry * BASE;
647 quo[i] = work / lden;
648 carry = work % lden;
649 }
650 }
651 else
652 {
653 /* Full double precision division,
654 with thanks to Don Knuth's "Seminumerical Algorithms". */
655 int num_hi_sig, den_hi_sig;
656 unsigned HOST_WIDE_INT quo_est, scale;
657
658 /* Find the highest nonzero divisor digit. */
659 for (i = 4 - 1;; i--)
660 if (den[i] != 0)
661 {
662 den_hi_sig = i;
663 break;
664 }
665
666 Ensure that the first digit of the divisor is at least BASE/2.
667 This is required by the quotient digit estimation algorithm. */
668
669 scale = BASE / (den[den_hi_sig] + 1);
670 if (scale > 1)
671 { /* scale divisor and dividend */
672 carry = 0;
673 for (i = 0; i <= 4 - 1; i++)
674 {
675 work = (num[i] * scale) + carry;
676 num[i] = LOWPART (work);
677 carry = HIGHPART (work);
678 }
679
680 num[4] = carry;
681 carry = 0;
682 for (i = 0; i <= 4 - 1; i++)
683 {
684 work = (den[i] * scale) + carry;
685 den[i] = LOWPART (work);
686 carry = HIGHPART (work);
687 if (den[i] != 0) den_hi_sig = i;
688 }
689 }
690
691 num_hi_sig = 4;
692
693 /* Main loop */
694 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
695 {
696 /* Guess the next quotient digit, quo_est, by dividing the first
697 two remaining dividend digits by the high order quotient digit.
698 quo_est is never low and is at most 2 high. */
699 unsigned HOST_WIDE_INT tmp;
700
701 num_hi_sig = i + den_hi_sig + 1;
702 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
703 if (num[num_hi_sig] != den[den_hi_sig])
704 quo_est = work / den[den_hi_sig];
705 else
706 quo_est = BASE - 1;
707
708 /* Refine quo_est so it's usually correct, and at most one high. */
709 tmp = work - quo_est * den[den_hi_sig];
710 if (tmp < BASE
711 && (den[den_hi_sig - 1] * quo_est
712 > (tmp * BASE + num[num_hi_sig - 2])))
713 quo_est--;
714
715 /* Try QUO_EST as the quotient digit, by multiplying the
716 divisor by QUO_EST and subtracting from the remaining dividend.
717 Keep in mind that QUO_EST is the I - 1st digit. */
718
719 carry = 0;
720 for (j = 0; j <= den_hi_sig; j++)
721 {
722 work = quo_est * den[j] + carry;
723 carry = HIGHPART (work);
724 work = num[i + j] - LOWPART (work);
725 num[i + j] = LOWPART (work);
726 carry += HIGHPART (work) != 0;
727 }
728
729 /* If quo_est was high by one, then num[i] went negative and
730 we need to correct things. */
731 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
732 {
733 quo_est--;
734 carry = 0; /* add divisor back in */
735 for (j = 0; j <= den_hi_sig; j++)
736 {
737 work = num[i + j] + den[j] + carry;
738 carry = HIGHPART (work);
739 num[i + j] = LOWPART (work);
740 }
741
742 num [num_hi_sig] += carry;
743 }
744
745 /* Store the quotient digit. */
746 quo[i] = quo_est;
747 }
748 }
749
750 decode (quo, lquo, hquo);
751
752 finish_up:
753 /* If the result should be negative, negate the quotient.  */
754 if (quo_neg)
755 neg_double (*lquo, *hquo, lquo, hquo);
756
757 /* compute trial remainder: rem = num - (quo * den) */
758 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
759 neg_double (*lrem, *hrem, lrem, hrem);
760 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
761
762 switch (code)
763 {
764 case TRUNC_DIV_EXPR:
765 case TRUNC_MOD_EXPR: /* round toward zero */
766 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
767 return overflow;
768
769 case FLOOR_DIV_EXPR:
770 case FLOOR_MOD_EXPR: /* round toward negative infinity */
771 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
772 {
773 /* quo = quo - 1; */
774 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
775 lquo, hquo);
776 }
777 else
778 return overflow;
779 break;
780
781 case CEIL_DIV_EXPR:
782 case CEIL_MOD_EXPR: /* round toward positive infinity */
783 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
784 {
785 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
786 lquo, hquo);
787 }
788 else
789 return overflow;
790 break;
791
792 case ROUND_DIV_EXPR:
793 case ROUND_MOD_EXPR: /* round to closest integer */
794 {
795 unsigned HOST_WIDE_INT labs_rem = *lrem;
796 HOST_WIDE_INT habs_rem = *hrem;
797 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
798 HOST_WIDE_INT habs_den = hden, htwice;
799
800 /* Get absolute values */
801 if (*hrem < 0)
802 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
803 if (hden < 0)
804 neg_double (lden, hden, &labs_den, &habs_den);
805
806 /* If (2 * abs (lrem) >= abs (lden)) */
807 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
808 labs_rem, habs_rem, &ltwice, &htwice);
809
810 if (((unsigned HOST_WIDE_INT) habs_den
811 < (unsigned HOST_WIDE_INT) htwice)
812 || (((unsigned HOST_WIDE_INT) habs_den
813 == (unsigned HOST_WIDE_INT) htwice)
814 && (labs_den < ltwice)))
815 {
816 if (*hquo < 0)
817 /* quo = quo - 1; */
818 add_double (*lquo, *hquo,
819 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
820 else
821 /* quo = quo + 1; */
822 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
823 lquo, hquo);
824 }
825 else
826 return overflow;
827 }
828 break;
829
830 default:
831 abort ();
832 }
833
834 /* compute true remainder: rem = num - (quo * den) */
835 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
836 neg_double (*lrem, *hrem, lrem, hrem);
837 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
838 return overflow;
839 }
840 \f
841 /* Determine whether an expression T can be cheaply negated using
842 the function negate_expr. */
843
844 static bool
845 negate_expr_p (t)
846 tree t;
847 {
848 unsigned HOST_WIDE_INT val;
849 unsigned int prec;
850 tree type;
851
852 if (t == 0)
853 return false;
854
855 type = TREE_TYPE (t);
856
857 STRIP_SIGN_NOPS (t);
858 switch (TREE_CODE (t))
859 {
860 case INTEGER_CST:
861 if (TREE_UNSIGNED (type))
862 return false;
863
864 /* Check that -CST will not overflow type. */
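      /* Only the most negative value lacks a representable negation: its
	 representation is a lone sign bit, i.e. bit PREC - 1 set and all
	 lower bits clear.  */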
865 prec = TYPE_PRECISION (type);
866 if (prec > HOST_BITS_PER_WIDE_INT)
867 {
868 if (TREE_INT_CST_LOW (t) != 0)
869 return true;
870 prec -= HOST_BITS_PER_WIDE_INT;
871 val = TREE_INT_CST_HIGH (t);
872 }
873 else
874 val = TREE_INT_CST_LOW (t);
875 if (prec < HOST_BITS_PER_WIDE_INT)
876 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
877 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
878
879 case REAL_CST:
880 case NEGATE_EXPR:
881 case MINUS_EXPR:
882 return true;
883
884 default:
885 break;
886 }
887 return false;
888 }
889
890 /* Given T, an expression, return the negation of T. Allow for T to be
891 null, in which case return null. */
892
893 static tree
894 negate_expr (t)
895 tree t;
896 {
897 tree type;
898 tree tem;
899
900 if (t == 0)
901 return 0;
902
903 type = TREE_TYPE (t);
904 STRIP_SIGN_NOPS (t);
905
906 switch (TREE_CODE (t))
907 {
908 case INTEGER_CST:
909 case REAL_CST:
910 if (! TREE_UNSIGNED (type)
911 && 0 != (tem = fold (build1 (NEGATE_EXPR, type, t)))
912 && ! TREE_OVERFLOW (tem))
913 return tem;
914 break;
915
916 case NEGATE_EXPR:
917 return convert (type, TREE_OPERAND (t, 0));
918
919 case MINUS_EXPR:
920 /* - (A - B) -> B - A */
921 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
922 return convert (type,
923 fold (build (MINUS_EXPR, TREE_TYPE (t),
924 TREE_OPERAND (t, 1),
925 TREE_OPERAND (t, 0))));
926 break;
927
928 default:
929 break;
930 }
931
932 return convert (type, fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t)));
933 }
934 \f
935 /* Split a tree IN into a constant, literal and variable parts that could be
936 combined with CODE to make IN. "constant" means an expression with
937 TREE_CONSTANT but that isn't an actual constant. CODE must be a
938 commutative arithmetic operation. Store the constant part into *CONP,
939 the literal in *LITP and return the variable part. If a part isn't
940 present, set it to null. If the tree does not decompose in this way,
941 return the entire tree as the variable part and the other parts as null.
942
943 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
944 case, we negate an operand that was subtracted, except for a literal,
945 for which we use *MINUS_LITP instead.
946
947 If NEGATE_P is true, we are negating all of IN, again except a literal
948 for which we use *MINUS_LITP instead.
949
950 If IN is itself a literal or constant, return it as appropriate.
951
952 Note that we do not guarantee that any of the three values will be the
953 same type as IN, but they will have the same signedness and mode. */
954
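/* An illustrative case: splitting IN == A - 4 with CODE == PLUS_EXPR and
   NEGATE_P == 0 returns A as the variable part and sets *LITP == 0,
   *CONP == 0 and *MINUS_LITP == 4, recording that the literal was
   subtracted.  */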
955 static tree
956 split_tree (in, code, conp, litp, minus_litp, negate_p)
957 tree in;
958 enum tree_code code;
959 tree *conp, *litp, *minus_litp;
960 int negate_p;
961 {
962 tree var = 0;
963
964 *conp = 0;
965 *litp = 0;
966 *minus_litp = 0;
967
968 /* Strip any conversions that don't change the machine mode or signedness. */
969 STRIP_SIGN_NOPS (in);
970
971 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
972 *litp = in;
973 else if (TREE_CODE (in) == code
974 || (! FLOAT_TYPE_P (TREE_TYPE (in))
975 /* We can associate addition and subtraction together (even
976 though the C standard doesn't say so) for integers because
977 the value is not affected. For reals, the value might be
978 affected, so we can't. */
979 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
980 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
981 {
982 tree op0 = TREE_OPERAND (in, 0);
983 tree op1 = TREE_OPERAND (in, 1);
984 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
985 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
986
987 /* First see if either of the operands is a literal, then a constant. */
988 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
989 *litp = op0, op0 = 0;
990 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
991 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
992
993 if (op0 != 0 && TREE_CONSTANT (op0))
994 *conp = op0, op0 = 0;
995 else if (op1 != 0 && TREE_CONSTANT (op1))
996 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
997
998 /* If we haven't dealt with either operand, this is not a case we can
999 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1000 if (op0 != 0 && op1 != 0)
1001 var = in;
1002 else if (op0 != 0)
1003 var = op0;
1004 else
1005 var = op1, neg_var_p = neg1_p;
1006
1007 /* Now do any needed negations. */
1008 if (neg_litp_p)
1009 *minus_litp = *litp, *litp = 0;
1010 if (neg_conp_p)
1011 *conp = negate_expr (*conp);
1012 if (neg_var_p)
1013 var = negate_expr (var);
1014 }
1015 else if (TREE_CONSTANT (in))
1016 *conp = in;
1017 else
1018 var = in;
1019
1020 if (negate_p)
1021 {
1022 if (*litp)
1023 *minus_litp = *litp, *litp = 0;
1024 else if (*minus_litp)
1025 *litp = *minus_litp, *minus_litp = 0;
1026 *conp = negate_expr (*conp);
1027 var = negate_expr (var);
1028 }
1029
1030 return var;
1031 }
1032
1033 /* Re-associate trees split by the above function. T1 and T2 are either
1034 expressions to associate or null. Return the new expression, if any. If
1035 we build an operation, do it in TYPE and with CODE. */
1036
1037 static tree
1038 associate_trees (t1, t2, code, type)
1039 tree t1, t2;
1040 enum tree_code code;
1041 tree type;
1042 {
1043 if (t1 == 0)
1044 return t2;
1045 else if (t2 == 0)
1046 return t1;
1047
1048 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1049 try to fold this since we will have infinite recursion. But do
1050 deal with any NEGATE_EXPRs. */
1051 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1052 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1053 {
1054 if (code == PLUS_EXPR)
1055 {
1056 if (TREE_CODE (t1) == NEGATE_EXPR)
1057 return build (MINUS_EXPR, type, convert (type, t2),
1058 convert (type, TREE_OPERAND (t1, 0)));
1059 else if (TREE_CODE (t2) == NEGATE_EXPR)
1060 return build (MINUS_EXPR, type, convert (type, t1),
1061 convert (type, TREE_OPERAND (t2, 0)));
1062 }
1063 return build (code, type, convert (type, t1), convert (type, t2));
1064 }
1065
1066 return fold (build (code, type, convert (type, t1), convert (type, t2)));
1067 }
1068 \f
1069 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1070 to produce a new constant.
1071
1072 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1073
1074 static tree
1075 int_const_binop (code, arg1, arg2, notrunc)
1076 enum tree_code code;
1077 tree arg1, arg2;
1078 int notrunc;
1079 {
1080 unsigned HOST_WIDE_INT int1l, int2l;
1081 HOST_WIDE_INT int1h, int2h;
1082 unsigned HOST_WIDE_INT low;
1083 HOST_WIDE_INT hi;
1084 unsigned HOST_WIDE_INT garbagel;
1085 HOST_WIDE_INT garbageh;
1086 tree t;
1087 tree type = TREE_TYPE (arg1);
1088 int uns = TREE_UNSIGNED (type);
1089 int is_sizetype
1090 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1091 int overflow = 0;
1092 int no_overflow = 0;
1093
1094 int1l = TREE_INT_CST_LOW (arg1);
1095 int1h = TREE_INT_CST_HIGH (arg1);
1096 int2l = TREE_INT_CST_LOW (arg2);
1097 int2h = TREE_INT_CST_HIGH (arg2);
1098
1099 switch (code)
1100 {
1101 case BIT_IOR_EXPR:
1102 low = int1l | int2l, hi = int1h | int2h;
1103 break;
1104
1105 case BIT_XOR_EXPR:
1106 low = int1l ^ int2l, hi = int1h ^ int2h;
1107 break;
1108
1109 case BIT_AND_EXPR:
1110 low = int1l & int2l, hi = int1h & int2h;
1111 break;
1112
1113 case BIT_ANDTC_EXPR:
1114 low = int1l & ~int2l, hi = int1h & ~int2h;
1115 break;
1116
1117 case RSHIFT_EXPR:
1118 int2l = -int2l;
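      /* A right shift is a left shift by the negated count; fall through.  */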
1119 case LSHIFT_EXPR:
1120 /* It's unclear from the C standard whether shifts can overflow.
1121 The following code ignores overflow; perhaps a C standard
1122 interpretation ruling is needed. */
1123 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1124 &low, &hi, !uns);
1125 no_overflow = 1;
1126 break;
1127
1128 case RROTATE_EXPR:
1129 int2l = - int2l;
1130 case LROTATE_EXPR:
1131 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1132 &low, &hi);
1133 break;
1134
1135 case PLUS_EXPR:
1136 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1137 break;
1138
1139 case MINUS_EXPR:
1140 neg_double (int2l, int2h, &low, &hi);
1141 add_double (int1l, int1h, low, hi, &low, &hi);
1142 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1143 break;
1144
1145 case MULT_EXPR:
1146 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1147 break;
1148
1149 case TRUNC_DIV_EXPR:
1150 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1151 case EXACT_DIV_EXPR:
1152 /* This is a shortcut for a common special case. */
1153 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1154 && ! TREE_CONSTANT_OVERFLOW (arg1)
1155 && ! TREE_CONSTANT_OVERFLOW (arg2)
1156 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1157 {
1158 if (code == CEIL_DIV_EXPR)
1159 int1l += int2l - 1;
1160
1161 low = int1l / int2l, hi = 0;
1162 break;
1163 }
1164
1165 /* ... fall through ... */
1166
1167 case ROUND_DIV_EXPR:
1168 if (int2h == 0 && int2l == 1)
1169 {
1170 low = int1l, hi = int1h;
1171 break;
1172 }
1173 if (int1l == int2l && int1h == int2h
1174 && ! (int1l == 0 && int1h == 0))
1175 {
1176 low = 1, hi = 0;
1177 break;
1178 }
1179 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1180 &low, &hi, &garbagel, &garbageh);
1181 break;
1182
1183 case TRUNC_MOD_EXPR:
1184 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1185 /* This is a shortcut for a common special case. */
1186 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1187 && ! TREE_CONSTANT_OVERFLOW (arg1)
1188 && ! TREE_CONSTANT_OVERFLOW (arg2)
1189 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1190 {
1191 if (code == CEIL_MOD_EXPR)
1192 int1l += int2l - 1;
1193 low = int1l % int2l, hi = 0;
1194 break;
1195 }
1196
1197 /* ... fall through ... */
1198
1199 case ROUND_MOD_EXPR:
1200 overflow = div_and_round_double (code, uns,
1201 int1l, int1h, int2l, int2h,
1202 &garbagel, &garbageh, &low, &hi);
1203 break;
1204
1205 case MIN_EXPR:
1206 case MAX_EXPR:
1207 if (uns)
1208 low = (((unsigned HOST_WIDE_INT) int1h
1209 < (unsigned HOST_WIDE_INT) int2h)
1210 || (((unsigned HOST_WIDE_INT) int1h
1211 == (unsigned HOST_WIDE_INT) int2h)
1212 && int1l < int2l));
1213 else
1214 low = (int1h < int2h
1215 || (int1h == int2h && int1l < int2l));
1216
1217 if (low == (code == MIN_EXPR))
1218 low = int1l, hi = int1h;
1219 else
1220 low = int2l, hi = int2h;
1221 break;
1222
1223 default:
1224 abort ();
1225 }
1226
1227 /* If this is for a sizetype, the result can be represented as a single
1228 (signed) HOST_WIDE_INT word and no overflow occurred, use size_int
1229 since it caches constants. */
1230 if (is_sizetype
1231 && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
1232 || (hi == -1 && (HOST_WIDE_INT) low < 0))
1233 && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
1234 return size_int_type_wide (low, type);
1235 else
1236 {
1237 t = build_int_2 (low, hi);
1238 TREE_TYPE (t) = TREE_TYPE (arg1);
1239 }
1240
1241 TREE_OVERFLOW (t)
1242 = ((notrunc
1243 ? (!uns || is_sizetype) && overflow
1244 : (force_fit_type (t, (!uns || is_sizetype) && overflow)
1245 && ! no_overflow))
1246 | TREE_OVERFLOW (arg1)
1247 | TREE_OVERFLOW (arg2));
1248
1249 /* If we're doing a size calculation, unsigned arithmetic does overflow.
1250 So check if force_fit_type truncated the value. */
1251 if (is_sizetype
1252 && ! TREE_OVERFLOW (t)
1253 && (TREE_INT_CST_HIGH (t) != hi
1254 || TREE_INT_CST_LOW (t) != low))
1255 TREE_OVERFLOW (t) = 1;
1256
1257 TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
1258 | TREE_CONSTANT_OVERFLOW (arg1)
1259 | TREE_CONSTANT_OVERFLOW (arg2));
1260 return t;
1261 }
1262
1263 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1264 constant. We assume ARG1 and ARG2 have the same data type, or at least
1265 are the same kind of constant and the same machine mode.
1266
1267 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1268
1269 static tree
1270 const_binop (code, arg1, arg2, notrunc)
1271 enum tree_code code;
1272 tree arg1, arg2;
1273 int notrunc;
1274 {
1275 STRIP_NOPS (arg1);
1276 STRIP_NOPS (arg2);
1277
1278 if (TREE_CODE (arg1) == INTEGER_CST)
1279 return int_const_binop (code, arg1, arg2, notrunc);
1280
1281 if (TREE_CODE (arg1) == REAL_CST)
1282 {
1283 REAL_VALUE_TYPE d1;
1284 REAL_VALUE_TYPE d2;
1285 REAL_VALUE_TYPE value;
1286 tree t;
1287
1288 d1 = TREE_REAL_CST (arg1);
1289 d2 = TREE_REAL_CST (arg2);
1290
1291 /* If either operand is a NaN, just return it. Otherwise, set up
1292 for floating-point trap; we return an overflow. */
1293 if (REAL_VALUE_ISNAN (d1))
1294 return arg1;
1295 else if (REAL_VALUE_ISNAN (d2))
1296 return arg2;
1297
1298 REAL_ARITHMETIC (value, code, d1, d2);
1299
1300 t = build_real (TREE_TYPE (arg1),
1301 real_value_truncate (TYPE_MODE (TREE_TYPE (arg1)),
1302 value));
1303
1304 TREE_OVERFLOW (t)
1305 = (force_fit_type (t, 0)
1306 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1307 TREE_CONSTANT_OVERFLOW (t)
1308 = TREE_OVERFLOW (t)
1309 | TREE_CONSTANT_OVERFLOW (arg1)
1310 | TREE_CONSTANT_OVERFLOW (arg2);
1311 return t;
1312 }
1313 if (TREE_CODE (arg1) == COMPLEX_CST)
1314 {
1315 tree type = TREE_TYPE (arg1);
1316 tree r1 = TREE_REALPART (arg1);
1317 tree i1 = TREE_IMAGPART (arg1);
1318 tree r2 = TREE_REALPART (arg2);
1319 tree i2 = TREE_IMAGPART (arg2);
1320 tree t;
1321
1322 switch (code)
1323 {
1324 case PLUS_EXPR:
1325 t = build_complex (type,
1326 const_binop (PLUS_EXPR, r1, r2, notrunc),
1327 const_binop (PLUS_EXPR, i1, i2, notrunc));
1328 break;
1329
1330 case MINUS_EXPR:
1331 t = build_complex (type,
1332 const_binop (MINUS_EXPR, r1, r2, notrunc),
1333 const_binop (MINUS_EXPR, i1, i2, notrunc));
1334 break;
1335
1336 case MULT_EXPR:
1337 t = build_complex (type,
1338 const_binop (MINUS_EXPR,
1339 const_binop (MULT_EXPR,
1340 r1, r2, notrunc),
1341 const_binop (MULT_EXPR,
1342 i1, i2, notrunc),
1343 notrunc),
1344 const_binop (PLUS_EXPR,
1345 const_binop (MULT_EXPR,
1346 r1, i2, notrunc),
1347 const_binop (MULT_EXPR,
1348 i1, r2, notrunc),
1349 notrunc));
1350 break;
1351
1352 case RDIV_EXPR:
1353 {
1354 tree magsquared
1355 = const_binop (PLUS_EXPR,
1356 const_binop (MULT_EXPR, r2, r2, notrunc),
1357 const_binop (MULT_EXPR, i2, i2, notrunc),
1358 notrunc);
1359
1360 t = build_complex (type,
1361 const_binop
1362 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1363 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1364 const_binop (PLUS_EXPR,
1365 const_binop (MULT_EXPR, r1, r2,
1366 notrunc),
1367 const_binop (MULT_EXPR, i1, i2,
1368 notrunc),
1369 notrunc),
1370 magsquared, notrunc),
1371 const_binop
1372 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1373 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1374 const_binop (MINUS_EXPR,
1375 const_binop (MULT_EXPR, i1, r2,
1376 notrunc),
1377 const_binop (MULT_EXPR, r1, i2,
1378 notrunc),
1379 notrunc),
1380 magsquared, notrunc));
1381 }
1382 break;
1383
1384 default:
1385 abort ();
1386 }
1387 return t;
1388 }
1389 return 0;
1390 }
1391
1392 /* These are the hash table functions for the hash table of INTEGER_CST
1393 nodes of a sizetype. */
1394
1395 /* Return the hash code for X, an INTEGER_CST. */
1396
1397 static hashval_t
1398 size_htab_hash (x)
1399 const void *x;
1400 {
1401 tree t = (tree) x;
1402
1403 return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
1404 ^ htab_hash_pointer (TREE_TYPE (t))
1405 ^ (TREE_OVERFLOW (t) << 20));
1406 }
1407
1408 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1409 is the same as that given by *Y, also an INTEGER_CST tree node. */
1410
1411 static int
1412 size_htab_eq (x, y)
1413 const void *x;
1414 const void *y;
1415 {
1416 tree xt = (tree) x;
1417 tree yt = (tree) y;
1418
1419 return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
1420 && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
1421 && TREE_TYPE (xt) == TREE_TYPE (yt)
1422 && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
1423 }
1424 \f
1425 /* Return an INTEGER_CST whose low-order HOST_BITS_PER_WIDE_INT bits are
1426 given by NUMBER and whose type is the sizetype represented by KIND. */
1427
1428 tree
1429 size_int_wide (number, kind)
1430 HOST_WIDE_INT number;
1431 enum size_type_kind kind;
1432 {
1433 return size_int_type_wide (number, sizetype_tab[(int) kind]);
1434 }
1435
1436 /* Likewise, but the desired type is specified explicitly. */
1437
1438 static GTY (()) tree new_const;
1439 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
1440 htab_t size_htab;
1441
1442 tree
1443 size_int_type_wide (number, type)
1444 HOST_WIDE_INT number;
1445 tree type;
1446 {
1447 PTR *slot;
1448
1449 if (size_htab == 0)
1450 {
1451 size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
1452 new_const = make_node (INTEGER_CST);
1453 }
1454
1455 /* Adjust NEW_CONST to be the constant we want. If it's already in the
1456 hash table, we return the value from the hash table. Otherwise, we
1457 place that in the hash table and make a new node for the next time. */
1458 TREE_INT_CST_LOW (new_const) = number;
1459 TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
1460 TREE_TYPE (new_const) = type;
1461 TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
1462 = force_fit_type (new_const, 0);
1463
1464 slot = htab_find_slot (size_htab, new_const, INSERT);
1465 if (*slot == 0)
1466 {
1467 tree t = new_const;
1468
1469 *slot = (PTR) new_const;
1470 new_const = make_node (INTEGER_CST);
1471 return t;
1472 }
1473 else
1474 return (tree) *slot;
1475 }
1476
1477 /* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
1478 is a tree code.  The type of the result is taken from the operands.
1479 Both must be the same integer type, and it must be a size type.
1480 If the operands are constant, so is the result. */
1481
1482 tree
1483 size_binop (code, arg0, arg1)
1484 enum tree_code code;
1485 tree arg0, arg1;
1486 {
1487 tree type = TREE_TYPE (arg0);
1488
1489 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1490 || type != TREE_TYPE (arg1))
1491 abort ();
1492
1493 /* Handle the special case of two integer constants faster. */
1494 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1495 {
1496 /* And some specific cases even faster than that. */
1497 if (code == PLUS_EXPR && integer_zerop (arg0))
1498 return arg1;
1499 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1500 && integer_zerop (arg1))
1501 return arg0;
1502 else if (code == MULT_EXPR && integer_onep (arg0))
1503 return arg1;
1504
1505 /* Handle general case of two integer constants. */
1506 return int_const_binop (code, arg0, arg1, 0);
1507 }
1508
1509 if (arg0 == error_mark_node || arg1 == error_mark_node)
1510 return error_mark_node;
1511
1512 return fold (build (code, type, arg0, arg1));
1513 }
1514
1515 /* Given two values, either both of sizetype or both of bitsizetype,
1516 compute the difference between the two values. Return the value
1517 in the signed type corresponding to the type of the operands. */
1518
1519 tree
1520 size_diffop (arg0, arg1)
1521 tree arg0, arg1;
1522 {
1523 tree type = TREE_TYPE (arg0);
1524 tree ctype;
1525
1526 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1527 || type != TREE_TYPE (arg1))
1528 abort ();
1529
1530 /* If the type is already signed, just do the simple thing. */
1531 if (! TREE_UNSIGNED (type))
1532 return size_binop (MINUS_EXPR, arg0, arg1);
1533
1534 ctype = (type == bitsizetype || type == ubitsizetype
1535 ? sbitsizetype : ssizetype);
1536
1537 /* If either operand is not a constant, do the conversions to the signed
1538 type and subtract. The hardware will do the right thing with any
1539 overflow in the subtraction. */
1540 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1541 return size_binop (MINUS_EXPR, convert (ctype, arg0),
1542 convert (ctype, arg1));
1543
1544 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1545 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1546 overflow) and negate (which can't either). Special-case a result
1547 of zero while we're here. */
1548 if (tree_int_cst_equal (arg0, arg1))
1549 return convert (ctype, integer_zero_node);
1550 else if (tree_int_cst_lt (arg1, arg0))
1551 return convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1552 else
1553 return size_binop (MINUS_EXPR, convert (ctype, integer_zero_node),
1554 convert (ctype, size_binop (MINUS_EXPR, arg1, arg0)));
1555 }
1556 \f
1557
1558 /* Given T, a tree representing type conversion of ARG1, a constant,
1559 return a constant tree representing the result of conversion. */
1560
1561 static tree
1562 fold_convert (t, arg1)
1563 tree t;
1564 tree arg1;
1565 {
1566 tree type = TREE_TYPE (t);
1567 int overflow = 0;
1568
1569 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1570 {
1571 if (TREE_CODE (arg1) == INTEGER_CST)
1572 {
1573 /* If we would build a constant wider than GCC supports,
1574 leave the conversion unfolded. */
1575 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
1576 return t;
1577
1578 /* If we are trying to make a sizetype for a small integer, use
1579 size_int to pick up cached constants and reduce duplicate nodes. */
1580 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1581 && !TREE_CONSTANT_OVERFLOW (arg1)
1582 && compare_tree_int (arg1, 10000) < 0)
1583 return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);
1584
1585 /* Given an integer constant, make new constant with new type,
1586 appropriately sign-extended or truncated. */
1587 t = build_int_2 (TREE_INT_CST_LOW (arg1),
1588 TREE_INT_CST_HIGH (arg1));
1589 TREE_TYPE (t) = type;
1590 /* Indicate an overflow if (1) ARG1 already overflowed,
1591 or (2) force_fit_type indicates an overflow.
1592 Tell force_fit_type that an overflow has already occurred
1593 if ARG1 is a too-large unsigned value and T is signed.
1594 But don't indicate an overflow if converting a pointer. */
1595 TREE_OVERFLOW (t)
1596 = ((force_fit_type (t,
1597 (TREE_INT_CST_HIGH (arg1) < 0
1598 && (TREE_UNSIGNED (type)
1599 < TREE_UNSIGNED (TREE_TYPE (arg1)))))
1600 && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
1601 || TREE_OVERFLOW (arg1));
1602 TREE_CONSTANT_OVERFLOW (t)
1603 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1604 }
1605 else if (TREE_CODE (arg1) == REAL_CST)
1606 {
1607 /* Don't initialize these, use assignments.
1608 Initialized local aggregates don't work on old compilers. */
1609 REAL_VALUE_TYPE x;
1610 REAL_VALUE_TYPE l;
1611 REAL_VALUE_TYPE u;
1612 tree type1 = TREE_TYPE (arg1);
1613 int no_upper_bound;
1614
1615 x = TREE_REAL_CST (arg1);
1616 l = real_value_from_int_cst (type1, TYPE_MIN_VALUE (type));
1617
1618 no_upper_bound = (TYPE_MAX_VALUE (type) == NULL);
1619 if (!no_upper_bound)
1620 u = real_value_from_int_cst (type1, TYPE_MAX_VALUE (type));
1621
1622 /* See if X will be in range after truncation towards 0.
1623 To compensate for truncation, move the bounds away from 0,
1624 but reject if X exactly equals the adjusted bounds. */
1625 REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
1626 if (!no_upper_bound)
1627 REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
1628 /* If X is a NaN, use zero instead and show we have an overflow.
1629 Otherwise, range check. */
1630 if (REAL_VALUE_ISNAN (x))
1631 overflow = 1, x = dconst0;
1632 else if (! (REAL_VALUES_LESS (l, x)
1633 && !no_upper_bound
1634 && REAL_VALUES_LESS (x, u)))
1635 overflow = 1;
1636
1637 {
1638 HOST_WIDE_INT low, high;
1639 REAL_VALUE_TO_INT (&low, &high, x);
1640 t = build_int_2 (low, high);
1641 }
1642 TREE_TYPE (t) = type;
1643 TREE_OVERFLOW (t)
1644 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1645 TREE_CONSTANT_OVERFLOW (t)
1646 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1647 }
1648 TREE_TYPE (t) = type;
1649 }
1650 else if (TREE_CODE (type) == REAL_TYPE)
1651 {
1652 if (TREE_CODE (arg1) == INTEGER_CST)
1653 return build_real_from_int_cst (type, arg1);
1654 if (TREE_CODE (arg1) == REAL_CST)
1655 {
1656 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1657 {
1658 /* We make a copy of ARG1 so that we don't modify an
1659 existing constant tree. */
1660 t = copy_node (arg1);
1661 TREE_TYPE (t) = type;
1662 return t;
1663 }
1664
1665 t = build_real (type,
1666 real_value_truncate (TYPE_MODE (type),
1667 TREE_REAL_CST (arg1)));
1668
1669 TREE_OVERFLOW (t)
1670 = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
1671 TREE_CONSTANT_OVERFLOW (t)
1672 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1673 return t;
1674 }
1675 }
1676 TREE_CONSTANT (t) = 1;
1677 return t;
1678 }
1679 \f
1680 /* Return an expr equal to X but certainly not valid as an lvalue. */
1681
1682 tree
1683 non_lvalue (x)
1684 tree x;
1685 {
1686 tree result;
1687
1688 /* These things are certainly not lvalues. */
1689 if (TREE_CODE (x) == NON_LVALUE_EXPR
1690 || TREE_CODE (x) == INTEGER_CST
1691 || TREE_CODE (x) == REAL_CST
1692 || TREE_CODE (x) == STRING_CST
1693 || TREE_CODE (x) == ADDR_EXPR)
1694 return x;
1695
1696 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
1697 TREE_CONSTANT (result) = TREE_CONSTANT (x);
1698 return result;
1699 }
1700
1701 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
1702 Zero means allow extended lvalues. */
1703
1704 int pedantic_lvalues;
1705
1706 /* When pedantic, return an expr equal to X but certainly not valid as a
1707 pedantic lvalue. Otherwise, return X. */
1708
1709 tree
1710 pedantic_non_lvalue (x)
1711 tree x;
1712 {
1713 if (pedantic_lvalues)
1714 return non_lvalue (x);
1715 else
1716 return x;
1717 }
1718 \f
1719 /* Given a tree comparison code, return the code that is the logical inverse
1720 of the given code. It is not safe to do this for floating-point
1721 comparisons, except for NE_EXPR and EQ_EXPR. */
1722
1723 static enum tree_code
1724 invert_tree_comparison (code)
1725 enum tree_code code;
1726 {
1727 switch (code)
1728 {
1729 case EQ_EXPR:
1730 return NE_EXPR;
1731 case NE_EXPR:
1732 return EQ_EXPR;
1733 case GT_EXPR:
1734 return LE_EXPR;
1735 case GE_EXPR:
1736 return LT_EXPR;
1737 case LT_EXPR:
1738 return GE_EXPR;
1739 case LE_EXPR:
1740 return GT_EXPR;
1741 default:
1742 abort ();
1743 }
1744 }
1745
1746 /* Similar, but return the comparison that results if the operands are
1747 swapped. This is safe for floating-point. */
1748
1749 static enum tree_code
1750 swap_tree_comparison (code)
1751 enum tree_code code;
1752 {
1753 switch (code)
1754 {
1755 case EQ_EXPR:
1756 case NE_EXPR:
1757 return code;
1758 case GT_EXPR:
1759 return LT_EXPR;
1760 case GE_EXPR:
1761 return LE_EXPR;
1762 case LT_EXPR:
1763 return GT_EXPR;
1764 case LE_EXPR:
1765 return GE_EXPR;
1766 default:
1767 abort ();
1768 }
1769 }
1770
1771
1772 /* Convert a comparison tree code from an enum tree_code representation
1773 into a compcode bit-based encoding. This function is the inverse of
1774 compcode_to_comparison. */
1775
1776 static int
1777 comparison_to_compcode (code)
1778 enum tree_code code;
1779 {
1780 switch (code)
1781 {
1782 case LT_EXPR:
1783 return COMPCODE_LT;
1784 case EQ_EXPR:
1785 return COMPCODE_EQ;
1786 case LE_EXPR:
1787 return COMPCODE_LE;
1788 case GT_EXPR:
1789 return COMPCODE_GT;
1790 case NE_EXPR:
1791 return COMPCODE_NE;
1792 case GE_EXPR:
1793 return COMPCODE_GE;
1794 default:
1795 abort ();
1796 }
1797 }
1798
1799 /* Convert a compcode bit-based encoding of a comparison operator back
1800 to GCC's enum tree_code representation. This function is the
1801 inverse of comparison_to_compcode. */
1802
1803 static enum tree_code
1804 compcode_to_comparison (code)
1805 int code;
1806 {
1807 switch (code)
1808 {
1809 case COMPCODE_LT:
1810 return LT_EXPR;
1811 case COMPCODE_EQ:
1812 return EQ_EXPR;
1813 case COMPCODE_LE:
1814 return LE_EXPR;
1815 case COMPCODE_GT:
1816 return GT_EXPR;
1817 case COMPCODE_NE:
1818 return NE_EXPR;
1819 case COMPCODE_GE:
1820 return GE_EXPR;
1821 default:
1822 abort ();
1823 }
1824 }
1825
1826 /* Return nonzero if CODE is a tree code that represents a truth value. */
1827
1828 static int
1829 truth_value_p (code)
1830 enum tree_code code;
1831 {
1832 return (TREE_CODE_CLASS (code) == '<'
1833 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
1834 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
1835 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
1836 }
1837 \f
1838 /* Return nonzero if two operands are necessarily equal.
1839 If ONLY_CONST is nonzero, only return nonzero for constants.
1840 This function tests whether the operands are indistinguishable;
1841 it does not test whether they are equal using C's == operation.
1842 The distinction is important for IEEE floating point, because
1843 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
1844 (2) two NaNs may be indistinguishable, but NaN!=NaN. */
1845
1846 int
1847 operand_equal_p (arg0, arg1, only_const)
1848 tree arg0, arg1;
1849 int only_const;
1850 {
1851 /* If both types don't have the same signedness, then we can't consider
1852 them equal. We must check this before the STRIP_NOPS calls
1853 because they may change the signedness of the arguments. */
1854 if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
1855 return 0;
1856
1857 STRIP_NOPS (arg0);
1858 STRIP_NOPS (arg1);
1859
1860 if (TREE_CODE (arg0) != TREE_CODE (arg1)
1861 /* This is needed for conversions and for COMPONENT_REF.
1862 Might as well play it safe and always test this. */
1863 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
1864 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
1865 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
1866 return 0;
1867
1868 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
1869 We don't care about side effects in that case because the SAVE_EXPR
1870 takes care of that for us. In all other cases, two expressions are
1871 equal if they have no side effects. If we have two identical
1872 expressions with side effects that should be treated the same due
1873 to the only side effects being identical SAVE_EXPR's, that will
1874 be detected in the recursive calls below. */
1875 if (arg0 == arg1 && ! only_const
1876 && (TREE_CODE (arg0) == SAVE_EXPR
1877 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
1878 return 1;
1879
1880 /* Next handle constant cases, those for which we can return 1 even
1881 if ONLY_CONST is set. */
1882 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
1883 switch (TREE_CODE (arg0))
1884 {
1885 case INTEGER_CST:
1886 return (! TREE_CONSTANT_OVERFLOW (arg0)
1887 && ! TREE_CONSTANT_OVERFLOW (arg1)
1888 && tree_int_cst_equal (arg0, arg1));
1889
1890 case REAL_CST:
1891 return (! TREE_CONSTANT_OVERFLOW (arg0)
1892 && ! TREE_CONSTANT_OVERFLOW (arg1)
1893 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
1894 TREE_REAL_CST (arg1)));
1895
1896 case VECTOR_CST:
1897 {
1898 tree v1, v2;
1899
1900 if (TREE_CONSTANT_OVERFLOW (arg0)
1901 || TREE_CONSTANT_OVERFLOW (arg1))
1902 return 0;
1903
1904 v1 = TREE_VECTOR_CST_ELTS (arg0);
1905 v2 = TREE_VECTOR_CST_ELTS (arg1);
1906 while (v1 && v2)
1907 {
1908 if (!operand_equal_p (v1, v2, only_const))
1909 return 0;
1910 v1 = TREE_CHAIN (v1);
1911 v2 = TREE_CHAIN (v2);
1912 }
1913
1914 return 1;
1915 }
1916
1917 case COMPLEX_CST:
1918 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
1919 only_const)
1920 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
1921 only_const));
1922
1923 case STRING_CST:
1924 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
1925 && ! memcmp (TREE_STRING_POINTER (arg0),
1926 TREE_STRING_POINTER (arg1),
1927 TREE_STRING_LENGTH (arg0)));
1928
1929 case ADDR_EXPR:
1930 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
1931 0);
1932 default:
1933 break;
1934 }
1935
1936 if (only_const)
1937 return 0;
1938
1939 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
1940 {
1941 case '1':
1942 /* Two conversions are equal only if signedness and modes match. */
1943 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
1944 && (TREE_UNSIGNED (TREE_TYPE (arg0))
1945 != TREE_UNSIGNED (TREE_TYPE (arg1))))
1946 return 0;
1947
1948 return operand_equal_p (TREE_OPERAND (arg0, 0),
1949 TREE_OPERAND (arg1, 0), 0);
1950
1951 case '<':
1952 case '2':
1953 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
1954 && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
1955 0))
1956 return 1;
1957
1958 /* For commutative ops, allow the other order. */
1959 return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
1960 || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
1961 || TREE_CODE (arg0) == BIT_IOR_EXPR
1962 || TREE_CODE (arg0) == BIT_XOR_EXPR
1963 || TREE_CODE (arg0) == BIT_AND_EXPR
1964 || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
1965 && operand_equal_p (TREE_OPERAND (arg0, 0),
1966 TREE_OPERAND (arg1, 1), 0)
1967 && operand_equal_p (TREE_OPERAND (arg0, 1),
1968 TREE_OPERAND (arg1, 0), 0));
1969
1970 case 'r':
1971 /* If either of the pointer (or reference) expressions we are dereferencing
1972 contains a side effect, these cannot be equal. */
1973 if (TREE_SIDE_EFFECTS (arg0)
1974 || TREE_SIDE_EFFECTS (arg1))
1975 return 0;
1976
1977 switch (TREE_CODE (arg0))
1978 {
1979 case INDIRECT_REF:
1980 return operand_equal_p (TREE_OPERAND (arg0, 0),
1981 TREE_OPERAND (arg1, 0), 0);
1982
1983 case COMPONENT_REF:
1984 case ARRAY_REF:
1985 case ARRAY_RANGE_REF:
1986 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1987 TREE_OPERAND (arg1, 0), 0)
1988 && operand_equal_p (TREE_OPERAND (arg0, 1),
1989 TREE_OPERAND (arg1, 1), 0));
1990
1991 case BIT_FIELD_REF:
1992 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1993 TREE_OPERAND (arg1, 0), 0)
1994 && operand_equal_p (TREE_OPERAND (arg0, 1),
1995 TREE_OPERAND (arg1, 1), 0)
1996 && operand_equal_p (TREE_OPERAND (arg0, 2),
1997 TREE_OPERAND (arg1, 2), 0));
1998 default:
1999 return 0;
2000 }
2001
2002 case 'e':
2003 if (TREE_CODE (arg0) == RTL_EXPR)
2004 return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
2005 return 0;
2006
2007 default:
2008 return 0;
2009 }
2010 }
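
/* A standalone check of the two IEEE distinctions drawn in the comment
   above (assuming IEEE 754 arithmetic and no -ffast-math); compile it as
   its own program, it is not part of fold-const.c.  */
#if 0
#include <assert.h>

int
main (void)
{
  double pz = 0.0, nz = -0.0;
  double qnan = pz / pz;           /* 0.0/0.0 is a quiet NaN */

  /* (1) -0.0 and 0.0 compare equal with ==, yet are distinguishable:
     dividing into them yields infinities of opposite sign.  */
  assert (pz == nz);
  assert (1.0 / pz != 1.0 / nz);

  /* (2) a NaN never compares equal, not even to itself, although two
     NaNs may be bit-for-bit identical.  */
  assert (qnan != qnan);

  return 0;
}
#endif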
2011 \f
2012 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2013 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2014
2015 When in doubt, return 0. */
2016
2017 static int
2018 operand_equal_for_comparison_p (arg0, arg1, other)
2019 tree arg0, arg1;
2020 tree other;
2021 {
2022 int unsignedp1, unsignedpo;
2023 tree primarg0, primarg1, primother;
2024 unsigned int correct_width;
2025
2026 if (operand_equal_p (arg0, arg1, 0))
2027 return 1;
2028
2029 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2030 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2031 return 0;
2032
2033 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2034 and see if the inner values are the same. This removes any
2035 signedness distinction, which doesn't matter here. */
2036 primarg0 = arg0, primarg1 = arg1;
2037 STRIP_NOPS (primarg0);
2038 STRIP_NOPS (primarg1);
2039 if (operand_equal_p (primarg0, primarg1, 0))
2040 return 1;
2041
2042 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2043 actual comparison operand, ARG0.
2044
2045 First throw away any conversions to wider types
2046 already present in the operands. */
2047
2048 primarg1 = get_narrower (arg1, &unsignedp1);
2049 primother = get_narrower (other, &unsignedpo);
2050
2051 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2052 if (unsignedp1 == unsignedpo
2053 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2054 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2055 {
2056 tree type = TREE_TYPE (arg0);
2057
2058 /* Make sure shorter operand is extended the right way
2059 to match the longer operand. */
2060 primarg1 = convert ((*lang_hooks.types.signed_or_unsigned_type)
2061 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2062
2063 if (operand_equal_p (arg0, convert (type, primarg1), 0))
2064 return 1;
2065 }
2066
2067 return 0;
2068 }
2069 \f
2070 /* See if ARG is an expression that is either a comparison or is performing
2071 arithmetic on comparisons. The comparisons must only be comparing
2072 two different values, which will be stored in *CVAL1 and *CVAL2; if
2073 they are nonzero it means that some operands have already been found.
2074 No variables may be used anywhere else in the expression except in the
2075 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2076 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2077
2078 If this is true, return 1. Otherwise, return zero. */
2079
2080 static int
2081 twoval_comparison_p (arg, cval1, cval2, save_p)
2082 tree arg;
2083 tree *cval1, *cval2;
2084 int *save_p;
2085 {
2086 enum tree_code code = TREE_CODE (arg);
2087 char class = TREE_CODE_CLASS (code);
2088
2089 /* We can handle some of the 'e' cases here. */
2090 if (class == 'e' && code == TRUTH_NOT_EXPR)
2091 class = '1';
2092 else if (class == 'e'
2093 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2094 || code == COMPOUND_EXPR))
2095 class = '2';
2096
2097 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
2098 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2099 {
2100 /* If we've already found a CVAL1 or CVAL2, this expression is
2101 too complex to handle. */
2102 if (*cval1 || *cval2)
2103 return 0;
2104
2105 class = '1';
2106 *save_p = 1;
2107 }
2108
2109 switch (class)
2110 {
2111 case '1':
2112 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2113
2114 case '2':
2115 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2116 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2117 cval1, cval2, save_p));
2118
2119 case 'c':
2120 return 1;
2121
2122 case 'e':
2123 if (code == COND_EXPR)
2124 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2125 cval1, cval2, save_p)
2126 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2127 cval1, cval2, save_p)
2128 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2129 cval1, cval2, save_p));
2130 return 0;
2131
2132 case '<':
2133 /* First see if we can handle the first operand, then the second. For
2134 the second operand, we know *CVAL1 can't be zero. It must be that
2135 one side of the comparison is each of the values; test for the
2136 case where this isn't true by failing if the two operands
2137 are the same. */
2138
2139 if (operand_equal_p (TREE_OPERAND (arg, 0),
2140 TREE_OPERAND (arg, 1), 0))
2141 return 0;
2142
2143 if (*cval1 == 0)
2144 *cval1 = TREE_OPERAND (arg, 0);
2145 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2146 ;
2147 else if (*cval2 == 0)
2148 *cval2 = TREE_OPERAND (arg, 0);
2149 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2150 ;
2151 else
2152 return 0;
2153
2154 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2155 ;
2156 else if (*cval2 == 0)
2157 *cval2 = TREE_OPERAND (arg, 1);
2158 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2159 ;
2160 else
2161 return 0;
2162
2163 return 1;
2164
2165 default:
2166 return 0;
2167 }
2168 }
2169 \f
2170 /* ARG is a tree that is known to contain just arithmetic operations and
2171 comparisons. Evaluate the operations in the tree substituting NEW0 for
2172 any occurrence of OLD0 as an operand of a comparison and likewise for
2173 NEW1 and OLD1. */
2174
2175 static tree
2176 eval_subst (arg, old0, new0, old1, new1)
2177 tree arg;
2178 tree old0, new0, old1, new1;
2179 {
2180 tree type = TREE_TYPE (arg);
2181 enum tree_code code = TREE_CODE (arg);
2182 char class = TREE_CODE_CLASS (code);
2183
2184 /* We can handle some of the 'e' cases here. */
2185 if (class == 'e' && code == TRUTH_NOT_EXPR)
2186 class = '1';
2187 else if (class == 'e'
2188 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2189 class = '2';
2190
2191 switch (class)
2192 {
2193 case '1':
2194 return fold (build1 (code, type,
2195 eval_subst (TREE_OPERAND (arg, 0),
2196 old0, new0, old1, new1)));
2197
2198 case '2':
2199 return fold (build (code, type,
2200 eval_subst (TREE_OPERAND (arg, 0),
2201 old0, new0, old1, new1),
2202 eval_subst (TREE_OPERAND (arg, 1),
2203 old0, new0, old1, new1)));
2204
2205 case 'e':
2206 switch (code)
2207 {
2208 case SAVE_EXPR:
2209 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2210
2211 case COMPOUND_EXPR:
2212 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2213
2214 case COND_EXPR:
2215 return fold (build (code, type,
2216 eval_subst (TREE_OPERAND (arg, 0),
2217 old0, new0, old1, new1),
2218 eval_subst (TREE_OPERAND (arg, 1),
2219 old0, new0, old1, new1),
2220 eval_subst (TREE_OPERAND (arg, 2),
2221 old0, new0, old1, new1)));
2222 default:
2223 break;
2224 }
2225 /* fall through - ??? */
2226
2227 case '<':
2228 {
2229 tree arg0 = TREE_OPERAND (arg, 0);
2230 tree arg1 = TREE_OPERAND (arg, 1);
2231
2232 /* We need to check both for exact equality and tree equality. The
2233 former will be true if the operand has a side-effect. In that
2234 case, we know the operand occurred exactly once. */
2235
2236 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2237 arg0 = new0;
2238 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2239 arg0 = new1;
2240
2241 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2242 arg1 = new0;
2243 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2244 arg1 = new1;
2245
2246 return fold (build (code, type, arg0, arg1));
2247 }
2248
2249 default:
2250 return arg;
2251 }
2252 }
2253 \f
2254 /* Return a tree for the case when the result of an expression is RESULT
2255 converted to TYPE and OMITTED was previously an operand of the expression
2256 but is now not needed (e.g., we folded OMITTED * 0).
2257
2258 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2259 the conversion of RESULT to TYPE. */
2260
2261 tree
2262 omit_one_operand (type, result, omitted)
2263 tree type, result, omitted;
2264 {
2265 tree t = convert (type, result);
2266
2267 if (TREE_SIDE_EFFECTS (omitted))
2268 return build (COMPOUND_EXPR, type, omitted, t);
2269
2270 return non_lvalue (t);
2271 }
2272
2273 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2274
2275 static tree
2276 pedantic_omit_one_operand (type, result, omitted)
2277 tree type, result, omitted;
2278 {
2279 tree t = convert (type, result);
2280
2281 if (TREE_SIDE_EFFECTS (omitted))
2282 return build (COMPOUND_EXPR, type, omitted, t);
2283
2284 return pedantic_non_lvalue (t);
2285 }
2286 \f
2287 /* Return a simplified tree node for the truth-negation of ARG. This
2288 never alters ARG itself. We assume that ARG is an operation that
2289 returns a truth value (0 or 1). */
2290
2291 tree
2292 invert_truthvalue (arg)
2293 tree arg;
2294 {
2295 tree type = TREE_TYPE (arg);
2296 enum tree_code code = TREE_CODE (arg);
2297
2298 if (code == ERROR_MARK)
2299 return arg;
2300
2301 /* If this is a comparison, we can simply invert it, except for
2302 floating-point non-equality comparisons, in which case we just
2303 enclose a TRUTH_NOT_EXPR around what we have. */
2304
2305 if (TREE_CODE_CLASS (code) == '<')
2306 {
2307 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2308 && !flag_unsafe_math_optimizations
2309 && code != NE_EXPR
2310 && code != EQ_EXPR)
2311 return build1 (TRUTH_NOT_EXPR, type, arg);
2312 else
2313 return build (invert_tree_comparison (code), type,
2314 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2315 }
2316
2317 switch (code)
2318 {
2319 case INTEGER_CST:
2320 return convert (type, build_int_2 (integer_zerop (arg), 0));
2321
2322 case TRUTH_AND_EXPR:
2323 return build (TRUTH_OR_EXPR, type,
2324 invert_truthvalue (TREE_OPERAND (arg, 0)),
2325 invert_truthvalue (TREE_OPERAND (arg, 1)));
2326
2327 case TRUTH_OR_EXPR:
2328 return build (TRUTH_AND_EXPR, type,
2329 invert_truthvalue (TREE_OPERAND (arg, 0)),
2330 invert_truthvalue (TREE_OPERAND (arg, 1)));
2331
2332 case TRUTH_XOR_EXPR:
2333 /* Here we can invert either operand. We invert the first operand
2334 unless the second operand is a TRUTH_NOT_EXPR in which case our
2335 result is the XOR of the first operand with the inside of the
2336 negation of the second operand. */
2337
2338 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2339 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2340 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2341 else
2342 return build (TRUTH_XOR_EXPR, type,
2343 invert_truthvalue (TREE_OPERAND (arg, 0)),
2344 TREE_OPERAND (arg, 1));
2345
2346 case TRUTH_ANDIF_EXPR:
2347 return build (TRUTH_ORIF_EXPR, type,
2348 invert_truthvalue (TREE_OPERAND (arg, 0)),
2349 invert_truthvalue (TREE_OPERAND (arg, 1)));
2350
2351 case TRUTH_ORIF_EXPR:
2352 return build (TRUTH_ANDIF_EXPR, type,
2353 invert_truthvalue (TREE_OPERAND (arg, 0)),
2354 invert_truthvalue (TREE_OPERAND (arg, 1)));
2355
2356 case TRUTH_NOT_EXPR:
2357 return TREE_OPERAND (arg, 0);
2358
2359 case COND_EXPR:
2360 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2361 invert_truthvalue (TREE_OPERAND (arg, 1)),
2362 invert_truthvalue (TREE_OPERAND (arg, 2)));
2363
2364 case COMPOUND_EXPR:
2365 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2366 invert_truthvalue (TREE_OPERAND (arg, 1)));
2367
2368 case WITH_RECORD_EXPR:
2369 return build (WITH_RECORD_EXPR, type,
2370 invert_truthvalue (TREE_OPERAND (arg, 0)),
2371 TREE_OPERAND (arg, 1));
2372
2373 case NON_LVALUE_EXPR:
2374 return invert_truthvalue (TREE_OPERAND (arg, 0));
2375
2376 case NOP_EXPR:
2377 case CONVERT_EXPR:
2378 case FLOAT_EXPR:
2379 return build1 (TREE_CODE (arg), type,
2380 invert_truthvalue (TREE_OPERAND (arg, 0)));
2381
2382 case BIT_AND_EXPR:
2383 if (!integer_onep (TREE_OPERAND (arg, 1)))
2384 break;
2385 return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));
2386
2387 case SAVE_EXPR:
2388 return build1 (TRUTH_NOT_EXPR, type, arg);
2389
2390 case CLEANUP_POINT_EXPR:
2391 return build1 (CLEANUP_POINT_EXPR, type,
2392 invert_truthvalue (TREE_OPERAND (arg, 0)));
2393
2394 default:
2395 break;
2396 }
2397 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2398 abort ();
2399 return build1 (TRUTH_NOT_EXPR, type, arg);
2400 }
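
/* A standalone truth-table check of the inversions performed above (De
   Morgan's laws for AND/OR, and the one-operand rule for XOR); compile it
   as its own program, it is not part of fold-const.c.  */
#if 0
#include <assert.h>

int
main (void)
{
  int a, b;

  for (a = 0; a <= 1; a++)
    for (b = 0; b <= 1; b++)
      {
        /* TRUTH_AND_EXPR inverts to TRUTH_OR_EXPR of the inversions.  */
        assert (!(a && b) == (!a || !b));

        /* TRUTH_OR_EXPR inverts to TRUTH_AND_EXPR of the inversions.  */
        assert (!(a || b) == (!a && !b));

        /* For TRUTH_XOR_EXPR, inverting a single operand suffices.  */
        assert (!(a ^ b) == ((!a) ^ b));
      }

  return 0;
}
#endif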
2401
2402 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2403 operands are another bit-wise operation with a common input. If so,
2404 distribute the bit operations to save an operation and possibly two if
2405 constants are involved. For example, convert
2406 (A | B) & (A | C) into A | (B & C)
2407 Further simplification will occur if B and C are constants.
2408
2409 If this optimization cannot be done, 0 will be returned. */
2410
2411 static tree
2412 distribute_bit_expr (code, type, arg0, arg1)
2413 enum tree_code code;
2414 tree type;
2415 tree arg0, arg1;
2416 {
2417 tree common;
2418 tree left, right;
2419
2420 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2421 || TREE_CODE (arg0) == code
2422 || (TREE_CODE (arg0) != BIT_AND_EXPR
2423 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2424 return 0;
2425
2426 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2427 {
2428 common = TREE_OPERAND (arg0, 0);
2429 left = TREE_OPERAND (arg0, 1);
2430 right = TREE_OPERAND (arg1, 1);
2431 }
2432 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2433 {
2434 common = TREE_OPERAND (arg0, 0);
2435 left = TREE_OPERAND (arg0, 1);
2436 right = TREE_OPERAND (arg1, 0);
2437 }
2438 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2439 {
2440 common = TREE_OPERAND (arg0, 1);
2441 left = TREE_OPERAND (arg0, 0);
2442 right = TREE_OPERAND (arg1, 1);
2443 }
2444 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2445 {
2446 common = TREE_OPERAND (arg0, 1);
2447 left = TREE_OPERAND (arg0, 0);
2448 right = TREE_OPERAND (arg1, 0);
2449 }
2450 else
2451 return 0;
2452
2453 return fold (build (TREE_CODE (arg0), type, common,
2454 fold (build (code, type, left, right))));
2455 }
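
/* A standalone spot-check of the distributive identity used above; compile
   it as its own program, it is not part of fold-const.c.  */
#if 0
#include <assert.h>

int
main (void)
{
  unsigned a = 0x5a, b = 0x0f, c = 0x33;

  /* (A | B) & (A | C) becomes A | (B & C); B & C folds further when both
     are constants.  */
  assert (((a | b) & (a | c)) == (a | (b & c)));

  /* Dually, (A & B) | (A & C) becomes A & (B | C).  */
  assert (((a & b) | (a & c)) == (a & (b | c)));

  return 0;
}
#endif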
2456 \f
2457 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2458 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
2459
2460 static tree
2461 make_bit_field_ref (inner, type, bitsize, bitpos, unsignedp)
2462 tree inner;
2463 tree type;
2464 int bitsize, bitpos;
2465 int unsignedp;
2466 {
2467 tree result = build (BIT_FIELD_REF, type, inner,
2468 size_int (bitsize), bitsize_int (bitpos));
2469
2470 TREE_UNSIGNED (result) = unsignedp;
2471
2472 return result;
2473 }
2474
2475 /* Optimize a bit-field compare.
2476
2477 There are two cases: First is a compare against a constant and the
2478 second is a comparison of two items where the fields are at the same
2479 bit position relative to the start of a chunk (byte, halfword, word)
2480 large enough to contain it. In these cases we can avoid the shift
2481 implicit in bitfield extractions.
2482
2483 For constants, we emit a compare of the shifted constant with the
2484 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2485 compared. For two fields at the same position, we do the ANDs with the
2486 similar mask and compare the result of the ANDs.
2487
2488 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2489 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2490 are the left and right operands of the comparison, respectively.
2491
2492 If the optimization described above can be done, we return the resulting
2493 tree. Otherwise we return zero. */
2494
2495 static tree
2496 optimize_bit_field_compare (code, compare_type, lhs, rhs)
2497 enum tree_code code;
2498 tree compare_type;
2499 tree lhs, rhs;
2500 {
2501 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
2502 tree type = TREE_TYPE (lhs);
2503 tree signed_type, unsigned_type;
2504 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2505 enum machine_mode lmode, rmode, nmode;
2506 int lunsignedp, runsignedp;
2507 int lvolatilep = 0, rvolatilep = 0;
2508 tree linner, rinner = NULL_TREE;
2509 tree mask;
2510 tree offset;
2511
2512 /* Get all the information about the extractions being done. If the bit size
2513 is the same as the size of the underlying object, we aren't doing an
2514 extraction at all and so can do nothing. We also don't want to
2515 do anything if the inner expression is a PLACEHOLDER_EXPR since we
2516 then will no longer be able to replace it. */
2517 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2518 &lunsignedp, &lvolatilep);
2519 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2520 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
2521 return 0;
2522
2523 if (!const_p)
2524 {
2525 /* If this is not a constant, we can only do something if bit positions,
2526 sizes, and signedness are the same. */
2527 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
2528 &runsignedp, &rvolatilep);
2529
2530 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2531 || lunsignedp != runsignedp || offset != 0
2532 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
2533 return 0;
2534 }
2535
2536 /* See if we can find a mode to refer to this field. We should be able to,
2537 but fail if we can't. */
2538 nmode = get_best_mode (lbitsize, lbitpos,
2539 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
2540 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
2541 TYPE_ALIGN (TREE_TYPE (rinner))),
2542 word_mode, lvolatilep || rvolatilep);
2543 if (nmode == VOIDmode)
2544 return 0;
2545
2546 /* Set signed and unsigned types of the precision of this mode for the
2547 shifts below. */
2548 signed_type = (*lang_hooks.types.type_for_mode) (nmode, 0);
2549 unsigned_type = (*lang_hooks.types.type_for_mode) (nmode, 1);
2550
2551 /* Compute the bit position and size for the new reference and our offset
2552 within it. If the new reference is the same size as the original, we
2553 won't optimize anything, so return zero. */
2554 nbitsize = GET_MODE_BITSIZE (nmode);
2555 nbitpos = lbitpos & ~ (nbitsize - 1);
2556 lbitpos -= nbitpos;
2557 if (nbitsize == lbitsize)
2558 return 0;
2559
2560 if (BYTES_BIG_ENDIAN)
2561 lbitpos = nbitsize - lbitsize - lbitpos;
2562
2563 /* Make the mask to be used against the extracted field. */
2564 mask = build_int_2 (~0, ~0);
2565 TREE_TYPE (mask) = unsigned_type;
2566 force_fit_type (mask, 0);
2567 mask = convert (unsigned_type, mask);
2568 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
2569 mask = const_binop (RSHIFT_EXPR, mask,
2570 size_int (nbitsize - lbitsize - lbitpos), 0);
2571
2572 if (! const_p)
2573 /* If not comparing with constant, just rework the comparison
2574 and return. */
2575 return build (code, compare_type,
2576 build (BIT_AND_EXPR, unsigned_type,
2577 make_bit_field_ref (linner, unsigned_type,
2578 nbitsize, nbitpos, 1),
2579 mask),
2580 build (BIT_AND_EXPR, unsigned_type,
2581 make_bit_field_ref (rinner, unsigned_type,
2582 nbitsize, nbitpos, 1),
2583 mask));
2584
2585 /* Otherwise, we are handling the constant case. See if the constant is too
2586 big for the field. Warn and return a tree for 0 (false) if so. We do
2587 this not only for its own sake, but to avoid having to test for this
2588 error case below. If we didn't, we might generate wrong code.
2589
2590 For unsigned fields, the constant shifted right by the field length should
2591 be all zero. For signed fields, the high-order bits should agree with
2592 the sign bit. */
2593
2594 if (lunsignedp)
2595 {
2596 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2597 convert (unsigned_type, rhs),
2598 size_int (lbitsize), 0)))
2599 {
2600 warning ("comparison is always %d due to width of bit-field",
2601 code == NE_EXPR);
2602 return convert (compare_type,
2603 (code == NE_EXPR
2604 ? integer_one_node : integer_zero_node));
2605 }
2606 }
2607 else
2608 {
2609 tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
2610 size_int (lbitsize - 1), 0);
2611 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2612 {
2613 warning ("comparison is always %d due to width of bit-field",
2614 code == NE_EXPR);
2615 return convert (compare_type,
2616 (code == NE_EXPR
2617 ? integer_one_node : integer_zero_node));
2618 }
2619 }
2620
2621 /* Single-bit compares should always be against zero. */
2622 if (lbitsize == 1 && ! integer_zerop (rhs))
2623 {
2624 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2625 rhs = convert (type, integer_zero_node);
2626 }
2627
2628 /* Make a new bitfield reference, shift the constant over the
2629 appropriate number of bits and mask it with the computed mask
2630 (in case this was a signed field). If we changed it, make a new one. */
2631 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
2632 if (lvolatilep)
2633 {
2634 TREE_SIDE_EFFECTS (lhs) = 1;
2635 TREE_THIS_VOLATILE (lhs) = 1;
2636 }
2637
2638 rhs = fold (const_binop (BIT_AND_EXPR,
2639 const_binop (LSHIFT_EXPR,
2640 convert (unsigned_type, rhs),
2641 size_int (lbitpos), 0),
2642 mask, 0));
2643
2644 return build (code, compare_type,
2645 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
2646 rhs);
2647 }
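
/* A standalone sketch of the mask and shift arithmetic above, for an
   assumed 5-bit field at bit 3 of a 32-bit word with little-endian bit
   numbering; compile it as its own program, it is not part of
   fold-const.c.  */
#if 0
#include <assert.h>

int
main (void)
{
  unsigned nbitsize = 32, lbitsize = 5, lbitpos = 3;
  unsigned mask = ~0u;
  unsigned word = 0xabcd;
  unsigned rhs = 0x15;
  unsigned field;

  /* All-ones shifted left then right, exactly as MASK is built above.  */
  mask <<= nbitsize - lbitsize;            /* keep LBITSIZE ones   */
  mask >>= nbitsize - lbitsize - lbitpos;  /* park them at LBITPOS */
  assert (mask == (0x1fu << 3));

  /* The compare of the extracted field against a constant becomes a
     compare of the masked word against the shifted constant, avoiding
     the extraction shift at run time.  */
  field = (word >> lbitpos) & 0x1fu;
  assert ((field == rhs) == ((word & mask) == (rhs << lbitpos)));

  return 0;
}
#endif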
2648 \f
2649 /* Subroutine for fold_truthop: decode a field reference.
2650
2651 If EXP is a comparison reference, we return the innermost reference.
2652
2653 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2654 set to the starting bit number.
2655
2656 If the innermost field can be completely contained in a mode-sized
2657 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
2658
2659 *PVOLATILEP is set to 1 if any expression encountered is volatile;
2660 otherwise it is not changed.
2661
2662 *PUNSIGNEDP is set to the signedness of the field.
2663
2664 *PMASK is set to the mask used. This is either contained in a
2665 BIT_AND_EXPR or derived from the width of the field.
2666
2667 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
2668
2669 Return 0 if this is not a component reference or is one that we can't
2670 do anything with. */
2671
2672 static tree
2673 decode_field_reference (exp, pbitsize, pbitpos, pmode, punsignedp,
2674 pvolatilep, pmask, pand_mask)
2675 tree exp;
2676 HOST_WIDE_INT *pbitsize, *pbitpos;
2677 enum machine_mode *pmode;
2678 int *punsignedp, *pvolatilep;
2679 tree *pmask;
2680 tree *pand_mask;
2681 {
2682 tree and_mask = 0;
2683 tree mask, inner, offset;
2684 tree unsigned_type;
2685 unsigned int precision;
2686
2687 /* All the optimizations using this function assume integer fields.
2688 There are problems with FP fields since the type_for_size call
2689 below can fail for, e.g., XFmode. */
2690 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
2691 return 0;
2692
2693 STRIP_NOPS (exp);
2694
2695 if (TREE_CODE (exp) == BIT_AND_EXPR)
2696 {
2697 and_mask = TREE_OPERAND (exp, 1);
2698 exp = TREE_OPERAND (exp, 0);
2699 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
2700 if (TREE_CODE (and_mask) != INTEGER_CST)
2701 return 0;
2702 }
2703
2704 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
2705 punsignedp, pvolatilep);
2706 if ((inner == exp && and_mask == 0)
2707 || *pbitsize < 0 || offset != 0
2708 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
2709 return 0;
2710
2711 /* Compute the mask to access the bitfield. */
2712 unsigned_type = (*lang_hooks.types.type_for_size) (*pbitsize, 1);
2713 precision = TYPE_PRECISION (unsigned_type);
2714
2715 mask = build_int_2 (~0, ~0);
2716 TREE_TYPE (mask) = unsigned_type;
2717 force_fit_type (mask, 0);
2718 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2719 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2720
2721 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
2722 if (and_mask != 0)
2723 mask = fold (build (BIT_AND_EXPR, unsigned_type,
2724 convert (unsigned_type, and_mask), mask));
2725
2726 *pmask = mask;
2727 *pand_mask = and_mask;
2728 return inner;
2729 }
2730
2731 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
2732 bit positions. */
2733
2734 static int
2735 all_ones_mask_p (mask, size)
2736 tree mask;
2737 int size;
2738 {
2739 tree type = TREE_TYPE (mask);
2740 unsigned int precision = TYPE_PRECISION (type);
2741 tree tmask;
2742
2743 tmask = build_int_2 (~0, ~0);
2744 TREE_TYPE (tmask) = (*lang_hooks.types.signed_type) (type);
2745 force_fit_type (tmask, 0);
2746 return
2747 tree_int_cst_equal (mask,
2748 const_binop (RSHIFT_EXPR,
2749 const_binop (LSHIFT_EXPR, tmask,
2750 size_int (precision - size),
2751 0),
2752 size_int (precision - size), 0));
2753 }
2754
2755 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
2756 represents the sign bit of EXP's type. If EXP represents a sign
2757 or zero extension, also test VAL against the unextended type.
2758 The return value is the (sub)expression whose sign bit is VAL,
2759 or NULL_TREE otherwise. */
2760
2761 static tree
2762 sign_bit_p (exp, val)
2763 tree exp;
2764 tree val;
2765 {
2766 unsigned HOST_WIDE_INT lo;
2767 HOST_WIDE_INT hi;
2768 int width;
2769 tree t;
2770
2771 /* Tree EXP must have an integral type. */
2772 t = TREE_TYPE (exp);
2773 if (! INTEGRAL_TYPE_P (t))
2774 return NULL_TREE;
2775
2776 /* Tree VAL must be an integer constant. */
2777 if (TREE_CODE (val) != INTEGER_CST
2778 || TREE_CONSTANT_OVERFLOW (val))
2779 return NULL_TREE;
2780
2781 width = TYPE_PRECISION (t);
2782 if (width > HOST_BITS_PER_WIDE_INT)
2783 {
2784 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
2785 lo = 0;
2786 }
2787 else
2788 {
2789 hi = 0;
2790 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
2791 }
2792
2793 if (TREE_INT_CST_HIGH (val) == hi && TREE_INT_CST_LOW (val) == lo)
2794 return exp;
2795
2796 /* Handle extension from a narrower type. */
2797 if (TREE_CODE (exp) == NOP_EXPR
2798 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
2799 return sign_bit_p (TREE_OPERAND (exp, 0), val);
2800
2801 return NULL_TREE;
2802 }
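
/* A standalone sketch of the sign-bit constant computed above: for a type
   of precision WIDTH fitting in one host word it is the single bit
   1 << (WIDTH - 1) (two's-complement representation assumed); compile it
   as its own program, it is not part of fold-const.c.  */
#if 0
#include <assert.h>
#include <limits.h>

int
main (void)
{
  unsigned width = 8 * sizeof (int);
  unsigned lo = 1u << (width - 1);
  signed char c = -1;

  assert (lo == (unsigned) INT_MIN);

  /* After a widening NOP_EXPR the interesting sign bit may be that of the
     narrower type, e.g. 0x80 for a signed char operand; hence the
     recursive call on the inner operand above.  */
  assert (((unsigned char) c & 0x80u) != 0);

  return 0;
}
#endif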
2803
2804 /* Subroutine for fold_truthop: determine if an operand is simple enough
2805 to be evaluated unconditionally. */
2806
2807 static int
2808 simple_operand_p (exp)
2809 tree exp;
2810 {
2811 /* Strip any conversions that don't change the machine mode. */
2812 while ((TREE_CODE (exp) == NOP_EXPR
2813 || TREE_CODE (exp) == CONVERT_EXPR)
2814 && (TYPE_MODE (TREE_TYPE (exp))
2815 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
2816 exp = TREE_OPERAND (exp, 0);
2817
2818 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
2819 || (DECL_P (exp)
2820 && ! TREE_ADDRESSABLE (exp)
2821 && ! TREE_THIS_VOLATILE (exp)
2822 && ! DECL_NONLOCAL (exp)
2823 /* Don't regard global variables as simple. They may be
2824 allocated in ways unknown to the compiler (shared memory,
2825 #pragma weak, etc). */
2826 && ! TREE_PUBLIC (exp)
2827 && ! DECL_EXTERNAL (exp)
2828 /* Loading a static variable is unduly expensive, but global
2829 registers aren't expensive. */
2830 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
2831 }
2832 \f
2833 /* The following functions are subroutines to fold_range_test and allow it to
2834 try to change a logical combination of comparisons into a range test.
2835
2836 For example, both
2837 X == 2 || X == 3 || X == 4 || X == 5
2838 and
2839 X >= 2 && X <= 5
2840 are converted to
2841 (unsigned) (X - 2) <= 3
2842
2843 We describe each set of comparisons as being either inside or outside
2844 a range, using a variable named like IN_P, and then describe the
2845 range with a lower and upper bound. If one of the bounds is omitted,
2846 it represents either the highest or lowest value of the type.
2847
2848 In the comments below, we represent a range by two numbers in brackets
2849 preceded by a "+" to designate being inside that range, or a "-" to
2850 designate being outside that range, so the condition can be inverted by
2851 flipping the prefix. An omitted bound is represented by a "-". For
2852 example, "- [-, 10]" means being outside the range starting at the lowest
2853 possible value and ending at 10, in other words, being greater than 10.
2854 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
2855 always false.
2856
2857 We set up things so that the missing bounds are handled in a consistent
2858 manner so neither a missing bound nor "true" and "false" need to be
2859 handled using a special case. */
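
/* A standalone check of the example transformation above; compile it as
   its own program, it is not part of fold-const.c.  */
#if 0
#include <assert.h>

int
main (void)
{
  int x;

  for (x = -10; x <= 10; x++)
    {
      int chain = (x == 2 || x == 3 || x == 4 || x == 5);
      int range = (x >= 2 && x <= 5);

      /* Subtracting the low bound turns [2, 5] into [0, 3], and one
         unsigned compare then tests both ends at once: values below 2
         wrap around to huge unsigned values.  */
      int folded = ((unsigned) (x - 2) <= 3u);

      assert (chain == range && range == folded);
    }

  return 0;
}
#endif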
2860
2861 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
2862 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
2863 and UPPER1_P are nonzero if the respective argument is an upper bound
2864 and zero for a lower. TYPE, if nonzero, is the type of the result; it
2865 must be specified for a comparison. ARG1 will be converted to ARG0's
2866 type if both are specified. */
2867
2868 static tree
2869 range_binop (code, type, arg0, upper0_p, arg1, upper1_p)
2870 enum tree_code code;
2871 tree type;
2872 tree arg0, arg1;
2873 int upper0_p, upper1_p;
2874 {
2875 tree tem;
2876 int result;
2877 int sgn0, sgn1;
2878
2879 /* If neither arg represents infinity, do the normal operation.
2880 Else, if not a comparison, return infinity. Else handle the special
2881 comparison rules. Note that most of the cases below won't occur, but
2882 are handled for consistency. */
2883
2884 if (arg0 != 0 && arg1 != 0)
2885 {
2886 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
2887 arg0, convert (TREE_TYPE (arg0), arg1)));
2888 STRIP_NOPS (tem);
2889 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
2890 }
2891
2892 if (TREE_CODE_CLASS (code) != '<')
2893 return 0;
2894
2895 /* Set SGN[01] to -1 if ARG[01] is an omitted lower bound, 1 for an
2896 omitted upper bound, and 0 if the bound is present. In real
2897 mathematics we cannot compare open-ended ranges, but this is
2898 computer arithmetic, where numbers are finite. We may therefore
2899 treat any omitted bound as a value Z of larger magnitude than any
2900 representable number, so omitted bounds of the same kind compare equal. */
2901 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
2902 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
2903 switch (code)
2904 {
2905 case EQ_EXPR:
2906 result = sgn0 == sgn1;
2907 break;
2908 case NE_EXPR:
2909 result = sgn0 != sgn1;
2910 break;
2911 case LT_EXPR:
2912 result = sgn0 < sgn1;
2913 break;
2914 case LE_EXPR:
2915 result = sgn0 <= sgn1;
2916 break;
2917 case GT_EXPR:
2918 result = sgn0 > sgn1;
2919 break;
2920 case GE_EXPR:
2921 result = sgn0 >= sgn1;
2922 break;
2923 default:
2924 abort ();
2925 }
2926
2927 return convert (type, result ? integer_one_node : integer_zero_node);
2928 }
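
/* A standalone sketch of the SGN trick above: BOUND_LT is a hypothetical
   helper that compares two bounds for LT_EXPR, where a null pointer stands
   for an omitted bound and UPPER_P says which infinity it plays; compile
   it as its own program, it is not part of fold-const.c.  */
#if 0
#include <assert.h>
#include <stddef.h>

static int
bound_lt (const int *a, int upper_a, const int *b, int upper_b)
{
  int sgn_a = a != NULL ? 0 : (upper_a ? 1 : -1);
  int sgn_b = b != NULL ? 0 : (upper_b ? 1 : -1);

  if (a != NULL && b != NULL)
    return *a < *b;
  return sgn_a < sgn_b;
}

int
main (void)
{
  int ten = 10;

  assert (bound_lt (NULL, 0, &ten, 1));    /* -inf < 10    */
  assert (bound_lt (&ten, 0, NULL, 1));    /* 10 < +inf    */
  assert (!bound_lt (NULL, 1, NULL, 1));   /* +inf == +inf */

  return 0;
}
#endif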
2929 \f
2930 /* Given EXP, a logical expression, set the range it is testing into
2931 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
2932 actually being tested. *PLOW and *PHIGH will be made of the same type
2933 as the returned expression. If EXP is not a comparison, we will most
2934 likely not be returning a useful value and range. */
2935
2936 static tree
2937 make_range (exp, pin_p, plow, phigh)
2938 tree exp;
2939 int *pin_p;
2940 tree *plow, *phigh;
2941 {
2942 enum tree_code code;
2943 tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
2944 tree orig_type = NULL_TREE;
2945 int in_p, n_in_p;
2946 tree low, high, n_low, n_high;
2947
2948 /* Start with simply saying "EXP != 0" and then look at the code of EXP
2949 and see if we can refine the range. Some of the cases below may not
2950 happen, but it doesn't seem worth worrying about this. We "continue"
2951 the outer loop when we've changed something; otherwise we "break"
2952 the switch, which will "break" the while. */
2953
2954 in_p = 0, low = high = convert (TREE_TYPE (exp), integer_zero_node);
2955
2956 while (1)
2957 {
2958 code = TREE_CODE (exp);
2959
2960 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
2961 {
2962 arg0 = TREE_OPERAND (exp, 0);
2963 if (TREE_CODE_CLASS (code) == '<'
2964 || TREE_CODE_CLASS (code) == '1'
2965 || TREE_CODE_CLASS (code) == '2')
2966 type = TREE_TYPE (arg0);
2967 if (TREE_CODE_CLASS (code) == '2'
2968 || TREE_CODE_CLASS (code) == '<'
2969 || (TREE_CODE_CLASS (code) == 'e'
2970 && TREE_CODE_LENGTH (code) > 1))
2971 arg1 = TREE_OPERAND (exp, 1);
2972 }
2973
2974 /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
2975 lose a cast by accident. */
2976 if (type != NULL_TREE && orig_type == NULL_TREE)
2977 orig_type = type;
2978
2979 switch (code)
2980 {
2981 case TRUTH_NOT_EXPR:
2982 in_p = ! in_p, exp = arg0;
2983 continue;
2984
2985 case EQ_EXPR: case NE_EXPR:
2986 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
2987 /* We can only do something if the range is testing for zero
2988 and if the second operand is an integer constant. Note that
2989 saying something is "in" the range we make is done by
2990 complementing IN_P since it will set in the initial case of
2991 being not equal to zero; "out" is leaving it alone. */
2992 if (low == 0 || high == 0
2993 || ! integer_zerop (low) || ! integer_zerop (high)
2994 || TREE_CODE (arg1) != INTEGER_CST)
2995 break;
2996
2997 switch (code)
2998 {
2999 case NE_EXPR: /* - [c, c] */
3000 low = high = arg1;
3001 break;
3002 case EQ_EXPR: /* + [c, c] */
3003 in_p = ! in_p, low = high = arg1;
3004 break;
3005 case GT_EXPR: /* - [-, c] */
3006 low = 0, high = arg1;
3007 break;
3008 case GE_EXPR: /* + [c, -] */
3009 in_p = ! in_p, low = arg1, high = 0;
3010 break;
3011 case LT_EXPR: /* - [c, -] */
3012 low = arg1, high = 0;
3013 break;
3014 case LE_EXPR: /* + [-, c] */
3015 in_p = ! in_p, low = 0, high = arg1;
3016 break;
3017 default:
3018 abort ();
3019 }
3020
3021 exp = arg0;
3022
3023 /* If this is an unsigned comparison, we also know that EXP is
3024 greater than or equal to zero. We base the range tests we make
3025 on that fact, so we record it here so we can parse existing
3026 range tests. */
3027 if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
3028 {
3029 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
3030 1, convert (type, integer_zero_node),
3031 NULL_TREE))
3032 break;
3033
3034 in_p = n_in_p, low = n_low, high = n_high;
3035
3036 /* If the high bound is missing, but we
3037 have a low bound, reverse the range so
3038 it goes from zero to the low bound minus 1. */
3039 if (high == 0 && low)
3040 {
3041 in_p = ! in_p;
3042 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3043 integer_one_node, 0);
3044 low = convert (type, integer_zero_node);
3045 }
3046 }
3047 continue;
3048
3049 case NEGATE_EXPR:
3050 /* (-x) IN [a,b] -> x in [-b, -a] */
3051 n_low = range_binop (MINUS_EXPR, type,
3052 convert (type, integer_zero_node), 0, high, 1);
3053 n_high = range_binop (MINUS_EXPR, type,
3054 convert (type, integer_zero_node), 0, low, 0);
3055 low = n_low, high = n_high;
3056 exp = arg0;
3057 continue;
3058
3059 case BIT_NOT_EXPR:
3060 /* ~ X -> -X - 1 */
3061 exp = build (MINUS_EXPR, type, negate_expr (arg0),
3062 convert (type, integer_one_node));
3063 continue;
3064
3065 case PLUS_EXPR: case MINUS_EXPR:
3066 if (TREE_CODE (arg1) != INTEGER_CST)
3067 break;
3068
3069 /* If EXP is signed, any overflow in the computation is undefined,
3070 so we don't worry about it so long as our computations on
3071 the bounds don't overflow. For unsigned, overflow is defined
3072 and this is exactly the right thing. */
3073 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3074 type, low, 0, arg1, 0);
3075 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3076 type, high, 1, arg1, 0);
3077 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3078 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3079 break;
3080
3081 /* Check for an unsigned range which has wrapped around the maximum
3082 value thus making n_high < n_low, and normalize it. */
3083 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3084 {
3085 low = range_binop (PLUS_EXPR, type, n_high, 0,
3086 integer_one_node, 0);
3087 high = range_binop (MINUS_EXPR, type, n_low, 0,
3088 integer_one_node, 0);
3089
3090 /* If the range is of the form +/- [ x+1, x ], we won't
3091 be able to normalize it. But then, it represents the
3092 whole range or the empty set, so make it
3093 +/- [ -, - ]. */
3094 if (tree_int_cst_equal (n_low, low)
3095 && tree_int_cst_equal (n_high, high))
3096 low = high = 0;
3097 else
3098 in_p = ! in_p;
3099 }
3100 else
3101 low = n_low, high = n_high;
3102
3103 exp = arg0;
3104 continue;
3105
3106 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3107 if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
3108 break;
3109
3110 if (! INTEGRAL_TYPE_P (type)
3111 || (low != 0 && ! int_fits_type_p (low, type))
3112 || (high != 0 && ! int_fits_type_p (high, type)))
3113 break;
3114
3115 n_low = low, n_high = high;
3116
3117 if (n_low != 0)
3118 n_low = convert (type, n_low);
3119
3120 if (n_high != 0)
3121 n_high = convert (type, n_high);
3122
3123 /* If we're converting from an unsigned to a signed type,
3124 we will be doing the comparison as unsigned. The tests above
3125 have already verified that LOW and HIGH are both positive.
3126
3127 So we have to make sure that the original unsigned value will
3128 be interpreted as positive. */
3129 if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
3130 {
3131 tree equiv_type = (*lang_hooks.types.type_for_mode)
3132 (TYPE_MODE (type), 1);
3133 tree high_positive;
3134
3135 /* A range without an upper bound is, naturally, unbounded.
3136 Since convert would have cropped a very large value, use
3137 the max value for the destination type. */
3138 high_positive
3139 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3140 : TYPE_MAX_VALUE (type);
3141
3142 if (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (exp)))
3143 high_positive = fold (build (RSHIFT_EXPR, type,
3144 convert (type, high_positive),
3145 convert (type, integer_one_node)));
3146
3147 /* If the low bound is specified, "and" the range with the
3148 range for which the original unsigned value will be
3149 positive. */
3150 if (low != 0)
3151 {
3152 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3153 1, n_low, n_high,
3154 1, convert (type, integer_zero_node),
3155 high_positive))
3156 break;
3157
3158 in_p = (n_in_p == in_p);
3159 }
3160 else
3161 {
3162 /* Otherwise, "or" the range with the range of the input
3163 that will be interpreted as negative. */
3164 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3165 0, n_low, n_high,
3166 1, convert (type, integer_zero_node),
3167 high_positive))
3168 break;
3169
3170 in_p = (in_p != n_in_p);
3171 }
3172 }
3173
3174 exp = arg0;
3175 low = n_low, high = n_high;
3176 continue;
3177
3178 default:
3179 break;
3180 }
3181
3182 break;
3183 }
3184
3185 /* If EXP is a constant, we can evaluate whether this is true or false. */
3186 if (TREE_CODE (exp) == INTEGER_CST)
3187 {
3188 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3189 exp, 0, low, 0))
3190 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3191 exp, 1, high, 1)));
3192 low = high = 0;
3193 exp = 0;
3194 }
3195
3196 *pin_p = in_p, *plow = low, *phigh = high;
3197 return exp;
3198 }
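
/* A standalone check of two of the rewrites above (two's-complement
   arithmetic assumed for the BIT_NOT_EXPR case); compile it as its own
   program, it is not part of fold-const.c.  */
#if 0
#include <assert.h>

int
main (void)
{
  int x;

  for (x = -5; x <= 5; x++)
    {
      /* BIT_NOT_EXPR: ~X is -X - 1, so a range test on ~X becomes a
         range test on X.  */
      assert (~x == -x - 1);

      /* NEGATE_EXPR: -X in [2, 4] iff X in [-4, -2].  */
      assert (((-x >= 2) && (-x <= 4)) == ((x >= -4) && (x <= -2)));
    }

  return 0;
}
#endif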
3199 \f
3200 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3201 type, TYPE, return an expression to test if EXP is in (or out of, depending
3202 on IN_P) the range. */
3203
3204 static tree
3205 build_range_check (type, exp, in_p, low, high)
3206 tree type;
3207 tree exp;
3208 int in_p;
3209 tree low, high;
3210 {
3211 tree etype = TREE_TYPE (exp);
3212 tree value;
3213
3214 if (! in_p
3215 && (0 != (value = build_range_check (type, exp, 1, low, high))))
3216 return invert_truthvalue (value);
3217
3218 if (low == 0 && high == 0)
3219 return convert (type, integer_one_node);
3220
3221 if (low == 0)
3222 return fold (build (LE_EXPR, type, exp, high));
3223
3224 if (high == 0)
3225 return fold (build (GE_EXPR, type, exp, low));
3226
3227 if (operand_equal_p (low, high, 0))
3228 return fold (build (EQ_EXPR, type, exp, low));
3229
3230 if (integer_zerop (low))
3231 {
3232 if (! TREE_UNSIGNED (etype))
3233 {
3234 etype = (*lang_hooks.types.unsigned_type) (etype);
3235 high = convert (etype, high);
3236 exp = convert (etype, exp);
3237 }
3238 return build_range_check (type, exp, 1, 0, high);
3239 }
3240
3241 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3242 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3243 {
3244 unsigned HOST_WIDE_INT lo;
3245 HOST_WIDE_INT hi;
3246 int prec;
3247
3248 prec = TYPE_PRECISION (etype);
3249 if (prec <= HOST_BITS_PER_WIDE_INT)
3250 {
3251 hi = 0;
3252 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3253 }
3254 else
3255 {
3256 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3257 lo = (unsigned HOST_WIDE_INT) -1;
3258 }
3259
3260 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3261 {
3262 if (TREE_UNSIGNED (etype))
3263 {
3264 etype = (*lang_hooks.types.signed_type) (etype);
3265 exp = convert (etype, exp);
3266 }
3267 return fold (build (GT_EXPR, type, exp,
3268 convert (etype, integer_zero_node)));
3269 }
3270 }
3271
3272 if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3273 && ! TREE_OVERFLOW (value))
3274 return build_range_check (type,
3275 fold (build (MINUS_EXPR, etype, exp, low)),
3276 1, convert (etype, integer_zero_node), value);
3277
3278 return 0;
3279 }
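
/* A standalone check of the (c>=1) && (c<=127) optimization above,
   assuming 8-bit chars and the usual two's-complement narrowing; compile
   it as its own program, it is not part of fold-const.c.  */
#if 0
#include <assert.h>

int
main (void)
{
  int c;

  /* [1, 127] is exactly the set of strictly positive values of the
     signed 8-bit type, so two compares become one.  */
  for (c = 0; c <= 255; c++)
    assert (((c >= 1) && (c <= 127)) == ((signed char) c > 0));

  return 0;
}
#endif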
3280 \f
3281 /* Given two ranges, see if we can merge them into one. Return 1 if we
3282 can, 0 if we can't. Set the output range into the specified parameters. */
3283
3284 static int
3285 merge_ranges (pin_p, plow, phigh, in0_p, low0, high0, in1_p, low1, high1)
3286 int *pin_p;
3287 tree *plow, *phigh;
3288 int in0_p, in1_p;
3289 tree low0, high0, low1, high1;
3290 {
3291 int no_overlap;
3292 int subset;
3293 int temp;
3294 tree tem;
3295 int in_p;
3296 tree low, high;
3297 int lowequal = ((low0 == 0 && low1 == 0)
3298 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3299 low0, 0, low1, 0)));
3300 int highequal = ((high0 == 0 && high1 == 0)
3301 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3302 high0, 1, high1, 1)));
3303
3304 /* Make range 0 be the range that starts first, or ends last if they
3305 start at the same value. Swap them if that is not the case. */
3306 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3307 low0, 0, low1, 0))
3308 || (lowequal
3309 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3310 high1, 1, high0, 1))))
3311 {
3312 temp = in0_p, in0_p = in1_p, in1_p = temp;
3313 tem = low0, low0 = low1, low1 = tem;
3314 tem = high0, high0 = high1, high1 = tem;
3315 }
3316
3317 /* Now flag two cases, whether the ranges are disjoint or whether the
3318 second range is totally subsumed in the first. Note that the tests
3319 below are simplified by the ones above. */
3320 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3321 high0, 1, low1, 0));
3322 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3323 high1, 1, high0, 1));
3324
3325 /* We now have four cases, depending on whether we are including or
3326 excluding the two ranges. */
3327 if (in0_p && in1_p)
3328 {
3329 /* If they don't overlap, the result is false. If the second range
3330 is a subset it is the result. Otherwise, the range is from the start
3331 of the second to the end of the first. */
3332 if (no_overlap)
3333 in_p = 0, low = high = 0;
3334 else if (subset)
3335 in_p = 1, low = low1, high = high1;
3336 else
3337 in_p = 1, low = low1, high = high0;
3338 }
3339
3340 else if (in0_p && ! in1_p)
3341 {
3342 /* If they don't overlap, the result is the first range. If they are
3343 equal, the result is false. If the second range is a subset of the
3344 first, and the ranges begin at the same place, we go from just after
3345 the end of the first range to the end of the second. If the second
3346 range is not a subset of the first, or if it is a subset and both
3347 ranges end at the same place, the range starts at the start of the
3348 first range and ends just before the second range.
3349 Otherwise, we can't describe this as a single range. */
3350 if (no_overlap)
3351 in_p = 1, low = low0, high = high0;
3352 else if (lowequal && highequal)
3353 in_p = 0, low = high = 0;
3354 else if (subset && lowequal)
3355 {
3356 in_p = 1, high = high0;
3357 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3358 integer_one_node, 0);
3359 }
3360 else if (! subset || highequal)
3361 {
3362 in_p = 1, low = low0;
3363 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3364 integer_one_node, 0);
3365 }
3366 else
3367 return 0;
3368 }
3369
3370 else if (! in0_p && in1_p)
3371 {
3372 /* If they don't overlap, the result is the second range. If the second
3373 is a subset of the first, the result is false. Otherwise,
3374 the range starts just after the first range and ends at the
3375 end of the second. */
3376 if (no_overlap)
3377 in_p = 1, low = low1, high = high1;
3378 else if (subset || highequal)
3379 in_p = 0, low = high = 0;
3380 else
3381 {
3382 in_p = 1, high = high1;
3383 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3384 integer_one_node, 0);
3385 }
3386 }
3387
3388 else
3389 {
3390 /* The case where we are excluding both ranges. Here the complex case
3391 is if they don't overlap. In that case, the only time we have a
3392 range is if they are adjacent. If the second is a subset of the
3393 first, the result is the first. Otherwise, the range to exclude
3394 starts at the beginning of the first range and ends at the end of the
3395 second. */
3396 if (no_overlap)
3397 {
3398 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3399 range_binop (PLUS_EXPR, NULL_TREE,
3400 high0, 1,
3401 integer_one_node, 1),
3402 1, low1, 0)))
3403 in_p = 0, low = low0, high = high1;
3404 else
3405 return 0;
3406 }
3407 else if (subset)
3408 in_p = 0, low = low0, high = high0;
3409 else
3410 in_p = 0, low = low0, high = high1;
3411 }
3412
3413 *pin_p = in_p, *plow = low, *phigh = high;
3414 return 1;
3415 }
3416 \f
3417 /* EXP is some logical combination of boolean tests. See if we can
3418 merge it into some range test. Return the new tree if so. */
3419
3420 static tree
3421 fold_range_test (exp)
3422 tree exp;
3423 {
3424 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3425 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3426 int in0_p, in1_p, in_p;
3427 tree low0, low1, low, high0, high1, high;
3428 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3429 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3430 tree tem;
3431
3432 /* If this is an OR operation, invert both sides; we will invert
3433 again at the end. */
3434 if (or_op)
3435 in0_p = ! in0_p, in1_p = ! in1_p;
3436
3437 /* If both expressions are the same, if we can merge the ranges, and we
3438 can build the range test, return it or its inversion. If one of the
3439 ranges is always true or always false, consider it to be the same
3440 expression as the other. */
3441 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3442 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3443 in1_p, low1, high1)
3444 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3445 lhs != 0 ? lhs
3446 : rhs != 0 ? rhs : integer_zero_node,
3447 in_p, low, high))))
3448 return or_op ? invert_truthvalue (tem) : tem;
3449
3450 /* On machines where the branch cost is expensive, if this is a
3451 short-circuited branch and the underlying object on both sides
3452 is the same, make a non-short-circuit operation. */
3453 else if (BRANCH_COST >= 2
3454 && lhs != 0 && rhs != 0
3455 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3456 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3457 && operand_equal_p (lhs, rhs, 0))
3458 {
3459 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3460 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
3461 which cases we can't do this. */
3462 if (simple_operand_p (lhs))
3463 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3464 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3465 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3466 TREE_OPERAND (exp, 1));
3467
3468 else if ((*lang_hooks.decls.global_bindings_p) () == 0
3469 && ! contains_placeholder_p (lhs))
3470 {
3471 tree common = save_expr (lhs);
3472
3473 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3474 or_op ? ! in0_p : in0_p,
3475 low0, high0))
3476 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3477 or_op ? ! in1_p : in1_p,
3478 low1, high1))))
3479 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3480 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3481 TREE_TYPE (exp), lhs, rhs);
3482 }
3483 }
3484
3485 return 0;
3486 }
3487 \f
3488 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3489 bit value. Arrange things so the extra bits will be set to zero if and
3490 only if C is sign-extended to its full width. If MASK is nonzero,
3491 it is an INTEGER_CST that should be AND'ed with the extra bits. */
3492
3493 static tree
3494 unextend (c, p, unsignedp, mask)
3495 tree c;
3496 int p;
3497 int unsignedp;
3498 tree mask;
3499 {
3500 tree type = TREE_TYPE (c);
3501 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3502 tree temp;
3503
3504 if (p == modesize || unsignedp)
3505 return c;
3506
3507 /* We work by getting just the sign bit into the low-order bit, then
3508 into the high-order bit, then sign-extend. We then XOR that value
3509 with C. */
3510 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3511 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3512
3513 /* We must use a signed type in order to get an arithmetic right shift.
3514 However, we must also avoid introducing accidental overflows, so that
3515 a subsequent call to integer_zerop will work. Hence we must
3516 do the type conversion here. At this point, the constant is either
3517 zero or one, and the conversion to a signed type can never overflow.
3518 We could get an overflow if this conversion is done anywhere else. */
3519 if (TREE_UNSIGNED (type))
3520 temp = convert ((*lang_hooks.types.signed_type) (type), temp);
3521
3522 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3523 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3524 if (mask != 0)
3525 temp = const_binop (BIT_AND_EXPR, temp, convert (TREE_TYPE (c), mask), 0);
3526 /* If necessary, convert the type back to match the type of C. */
3527 if (TREE_UNSIGNED (type))
3528 temp = convert (type, temp);
3529
3530 return convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
3531 }
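
/* A standalone sketch of the shift pair above: moving a P-bit field's sign
   bit to the top of a *signed* word and arithmetically shifting it back
   sign-extends the field (arithmetic right shifts and two's complement
   assumed, which is also what the signed-type dance above relies on; an
   unsigned type would give a logical shift and lose the extension);
   compile it as its own program, it is not part of fold-const.c.  */
#if 0
#include <assert.h>

int
main (void)
{
  int p = 12, modesize = 8 * sizeof (int);
  unsigned raw = 0xfff;                             /* -1 as 12 bits  */
  int ext = (int) (raw << (modesize - p)) >> (modesize - p);

  assert (ext == -1);

  raw = 0x7ff;                                      /* sign bit clear */
  ext = (int) (raw << (modesize - p)) >> (modesize - p);
  assert (ext == 2047);

  return 0;
}
#endif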
3532 \f
3533 /* Find ways of folding logical expressions of LHS and RHS:
3534 Try to merge two comparisons to the same innermost item.
3535 Look for range tests like "ch >= '0' && ch <= '9'".
3536 Look for combinations of simple terms on machines with expensive branches
3537 and evaluate the RHS unconditionally.
3538
3539 For example, if we have p->a == 2 && p->b == 4 and we can make an
3540 object large enough to span both A and B, we can do this with a comparison
3541 against the object ANDed with the a mask.
3542
3543 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3544 operations to do this with one comparison.
3545
3546 We check for both normal comparisons and the BIT_AND_EXPRs made by this
3547 function and the one above.
3548
3549 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3550 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3551
3552 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
3553 two operands.
3554
3555 We return the simplified tree or 0 if no optimization is possible. */
3556
3557 static tree
3558 fold_truthop (code, truth_type, lhs, rhs)
3559 enum tree_code code;
3560 tree truth_type, lhs, rhs;
3561 {
3562 /* If this is the "or" of two comparisons, we can do something if
3563 the comparisons are NE_EXPR. If this is the "and", we can do something
3564 if the comparisons are EQ_EXPR. I.e.,
3565 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3566
3567 WANTED_CODE is this operation code. For single bit fields, we can
3568 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3569 comparison for one-bit fields. */
3570
3571 enum tree_code wanted_code;
3572 enum tree_code lcode, rcode;
3573 tree ll_arg, lr_arg, rl_arg, rr_arg;
3574 tree ll_inner, lr_inner, rl_inner, rr_inner;
3575 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3576 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3577 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3578 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3579 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3580 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3581 enum machine_mode lnmode, rnmode;
3582 tree ll_mask, lr_mask, rl_mask, rr_mask;
3583 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3584 tree l_const, r_const;
3585 tree lntype, rntype, result;
3586 int first_bit, end_bit;
3587 int volatilep;
3588
3589 /* Start by getting the comparison codes. Fail if anything is volatile.
3590 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3591 it were surrounded with a NE_EXPR. */
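  /* For instance (illustrative): `(x & 1) && y' is handled as though it
     had been written `((x & 1) != 0) && y'.  */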
3592
3593 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3594 return 0;
3595
3596 lcode = TREE_CODE (lhs);
3597 rcode = TREE_CODE (rhs);
3598
3599 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3600 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3601
3602 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3603 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3604
3605 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3606 return 0;
3607
3608 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3609 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3610
3611 ll_arg = TREE_OPERAND (lhs, 0);
3612 lr_arg = TREE_OPERAND (lhs, 1);
3613 rl_arg = TREE_OPERAND (rhs, 0);
3614 rr_arg = TREE_OPERAND (rhs, 1);
3615
3616 /* Simplify (x<y) || (x==y) into (x<=y) and related optimizations. */
3617 if (simple_operand_p (ll_arg)
3618 && simple_operand_p (lr_arg)
3619 && !FLOAT_TYPE_P (TREE_TYPE (ll_arg)))
3620 {
3621 int compcode;
3622
3623 if (operand_equal_p (ll_arg, rl_arg, 0)
3624 && operand_equal_p (lr_arg, rr_arg, 0))
3625 {
3626 int lcompcode, rcompcode;
3627
3628 lcompcode = comparison_to_compcode (lcode);
3629 rcompcode = comparison_to_compcode (rcode);
3630 compcode = (code == TRUTH_AND_EXPR)
3631 ? lcompcode & rcompcode
3632 : lcompcode | rcompcode;
3633 }
3634 else if (operand_equal_p (ll_arg, rr_arg, 0)
3635 && operand_equal_p (lr_arg, rl_arg, 0))
3636 {
3637 int lcompcode, rcompcode;
3638
3639 rcode = swap_tree_comparison (rcode);
3640 lcompcode = comparison_to_compcode (lcode);
3641 rcompcode = comparison_to_compcode (rcode);
3642 compcode = (code == TRUTH_AND_EXPR)
3643 ? lcompcode & rcompcode
3644 : lcompcode | rcompcode;
3645 }
3646 else
3647 compcode = -1;
3648
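      /* For instance (illustrative): assuming the compcode bit encoding,
         (x < y) || (x == y) merges as COMPCODE_LT | COMPCODE_EQ
         == COMPCODE_LE and yields x <= y, while (x < y) && (x > y)
         merges to COMPCODE_FALSE.  */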
3649 if (compcode == COMPCODE_TRUE)
3650 return convert (truth_type, integer_one_node);
3651 else if (compcode == COMPCODE_FALSE)
3652 return convert (truth_type, integer_zero_node);
3653 else if (compcode != -1)
3654 return build (compcode_to_comparison (compcode),
3655 truth_type, ll_arg, lr_arg);
3656 }
3657
3658 /* If the RHS can be evaluated unconditionally and its operands are
3659 simple, it wins to evaluate the RHS unconditionally on machines
3660 with expensive branches. In this case, this isn't a comparison
3661 that can be merged. Avoid doing this if the RHS is a floating-point
3662 comparison since those can trap. */
3663
3664 if (BRANCH_COST >= 2
3665 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
3666 && simple_operand_p (rl_arg)
3667 && simple_operand_p (rr_arg))
3668 {
3669 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
3670 if (code == TRUTH_OR_EXPR
3671 && lcode == NE_EXPR && integer_zerop (lr_arg)
3672 && rcode == NE_EXPR && integer_zerop (rr_arg)
3673 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3674 return build (NE_EXPR, truth_type,
3675 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3676 ll_arg, rl_arg),
3677 integer_zero_node);
3678
3679 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
3680 if (code == TRUTH_AND_EXPR
3681 && lcode == EQ_EXPR && integer_zerop (lr_arg)
3682 && rcode == EQ_EXPR && integer_zerop (rr_arg)
3683 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3684 return build (EQ_EXPR, truth_type,
3685 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3686 ll_arg, rl_arg),
3687 integer_zero_node);
3688
3689 return build (code, truth_type, lhs, rhs);
3690 }
3691
3692 /* See if the comparisons can be merged. Then get all the parameters for
3693 each side. */
3694
3695 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
3696 || (rcode != EQ_EXPR && rcode != NE_EXPR))
3697 return 0;
3698
3699 volatilep = 0;
3700 ll_inner = decode_field_reference (ll_arg,
3701 &ll_bitsize, &ll_bitpos, &ll_mode,
3702 &ll_unsignedp, &volatilep, &ll_mask,
3703 &ll_and_mask);
3704 lr_inner = decode_field_reference (lr_arg,
3705 &lr_bitsize, &lr_bitpos, &lr_mode,
3706 &lr_unsignedp, &volatilep, &lr_mask,
3707 &lr_and_mask);
3708 rl_inner = decode_field_reference (rl_arg,
3709 &rl_bitsize, &rl_bitpos, &rl_mode,
3710 &rl_unsignedp, &volatilep, &rl_mask,
3711 &rl_and_mask);
3712 rr_inner = decode_field_reference (rr_arg,
3713 &rr_bitsize, &rr_bitpos, &rr_mode,
3714 &rr_unsignedp, &volatilep, &rr_mask,
3715 &rr_and_mask);
3716
3717 /* The inner operation on the lhs of each comparison must be the
3718 same if we are to be able to do anything.
3719 Then see if we have constants. If not, the same must be true for
3720 the rhs's. */
3721 if (volatilep || ll_inner == 0 || rl_inner == 0
3722 || ! operand_equal_p (ll_inner, rl_inner, 0))
3723 return 0;
3724
3725 if (TREE_CODE (lr_arg) == INTEGER_CST
3726 && TREE_CODE (rr_arg) == INTEGER_CST)
3727 l_const = lr_arg, r_const = rr_arg;
3728 else if (lr_inner == 0 || rr_inner == 0
3729 || ! operand_equal_p (lr_inner, rr_inner, 0))
3730 return 0;
3731 else
3732 l_const = r_const = 0;
3733
3734 /* If either comparison code is not correct for our logical operation,
3735 fail. However, we can convert a one-bit comparison against zero into
3736 the opposite comparison against that bit being set in the field. */
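  /* For instance (illustrative): under `&&', where WANTED_CODE is
     EQ_EXPR, the one-bit test `(x & 4) != 0' is recast as
     `(x & 4) == 4'.  */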
3737
3738 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
3739 if (lcode != wanted_code)
3740 {
3741 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
3742 {
3743 /* Make the left operand unsigned, since we are only interested
3744 in the value of one bit. Otherwise we are doing the wrong
3745 thing below. */
3746 ll_unsignedp = 1;
3747 l_const = ll_mask;
3748 }
3749 else
3750 return 0;
3751 }
3752
3753 /* This is analogous to the code for l_const above. */
3754 if (rcode != wanted_code)
3755 {
3756 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
3757 {
3758 rl_unsignedp = 1;
3759 r_const = rl_mask;
3760 }
3761 else
3762 return 0;
3763 }
3764
3765 /* After this point all optimizations will generate bit-field
3766 references, which we might not want. */
3767 if (! (*lang_hooks.can_use_bit_fields_p) ())
3768 return 0;
3769
3770 /* See if we can find a mode that contains both fields being compared on
3771 the left. If we can't, fail. Otherwise, update all constants and masks
3772 to be relative to a field of that size. */
3773 first_bit = MIN (ll_bitpos, rl_bitpos);
3774 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
3775 lnmode = get_best_mode (end_bit - first_bit, first_bit,
3776 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
3777 volatilep);
3778 if (lnmode == VOIDmode)
3779 return 0;
3780
3781 lnbitsize = GET_MODE_BITSIZE (lnmode);
3782 lnbitpos = first_bit & ~ (lnbitsize - 1);
3783 lntype = (*lang_hooks.types.type_for_size) (lnbitsize, 1);
3784 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
3785
3786 if (BYTES_BIG_ENDIAN)
3787 {
3788 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
3789 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
3790 }
3791
3792 ll_mask = const_binop (LSHIFT_EXPR, convert (lntype, ll_mask),
3793 size_int (xll_bitpos), 0);
3794 rl_mask = const_binop (LSHIFT_EXPR, convert (lntype, rl_mask),
3795 size_int (xrl_bitpos), 0);
3796
3797 if (l_const)
3798 {
3799 l_const = convert (lntype, l_const);
3800 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
3801 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
3802 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
3803 fold (build1 (BIT_NOT_EXPR,
3804 lntype, ll_mask)),
3805 0)))
3806 {
3807 warning ("comparison is always %d", wanted_code == NE_EXPR);
3808
3809 return convert (truth_type,
3810 wanted_code == NE_EXPR
3811 ? integer_one_node : integer_zero_node);
3812 }
3813 }
3814 if (r_const)
3815 {
3816 r_const = convert (lntype, r_const);
3817 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
3818 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
3819 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
3820 fold (build1 (BIT_NOT_EXPR,
3821 lntype, rl_mask)),
3822 0)))
3823 {
3824 warning ("comparison is always %d", wanted_code == NE_EXPR);
3825
3826 return convert (truth_type,
3827 wanted_code == NE_EXPR
3828 ? integer_one_node : integer_zero_node);
3829 }
3830 }
3831
3832 /* If the right sides are not constant, do the same for them. Also,
3833 disallow this optimization if a size or signedness mismatch occurs
3834 between the left and right sides. */
3835 if (l_const == 0)
3836 {
3837 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
3838 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
3839 /* Make sure the two fields on the right
3840 correspond to the left without being swapped. */
3841 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
3842 return 0;
3843
3844 first_bit = MIN (lr_bitpos, rr_bitpos);
3845 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
3846 rnmode = get_best_mode (end_bit - first_bit, first_bit,
3847 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
3848 volatilep);
3849 if (rnmode == VOIDmode)
3850 return 0;
3851
3852 rnbitsize = GET_MODE_BITSIZE (rnmode);
3853 rnbitpos = first_bit & ~ (rnbitsize - 1);
3854 rntype = (*lang_hooks.types.type_for_size) (rnbitsize, 1);
3855 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
3856
3857 if (BYTES_BIG_ENDIAN)
3858 {
3859 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
3860 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
3861 }
3862
3863 lr_mask = const_binop (LSHIFT_EXPR, convert (rntype, lr_mask),
3864 size_int (xlr_bitpos), 0);
3865 rr_mask = const_binop (LSHIFT_EXPR, convert (rntype, rr_mask),
3866 size_int (xrr_bitpos), 0);
3867
3868 /* Make a mask that corresponds to both fields being compared.
3869 Do this for both items being compared. If the operands are the
3870 same size and the bits being compared are in the same position
3871 then we can do this by masking both and comparing the masked
3872 results. */
3873 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3874 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
3875 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
3876 {
3877 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
3878 ll_unsignedp || rl_unsignedp);
3879 if (! all_ones_mask_p (ll_mask, lnbitsize))
3880 lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
3881
3882 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
3883 lr_unsignedp || rr_unsignedp);
3884 if (! all_ones_mask_p (lr_mask, rnbitsize))
3885 rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
3886
3887 return build (wanted_code, truth_type, lhs, rhs);
3888 }
3889
3890 /* There is still another way we can do something: If both pairs of
3891 fields being compared are adjacent, we may be able to make a wider
3892 field containing them both.
3893
3894 Note that we still must mask the lhs/rhs expressions. Furthermore,
3895 the mask must be shifted to account for the shift done by
3896 make_bit_field_ref. */
3897 if ((ll_bitsize + ll_bitpos == rl_bitpos
3898 && lr_bitsize + lr_bitpos == rr_bitpos)
3899 || (ll_bitpos == rl_bitpos + rl_bitsize
3900 && lr_bitpos == rr_bitpos + rr_bitsize))
3901 {
3902 tree type;
3903
3904 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
3905 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
3906 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
3907 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
3908
3909 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
3910 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
3911 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
3912 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
3913
3914 /* Convert to the smaller type before masking out unwanted bits. */
3915 type = lntype;
3916 if (lntype != rntype)
3917 {
3918 if (lnbitsize > rnbitsize)
3919 {
3920 lhs = convert (rntype, lhs);
3921 ll_mask = convert (rntype, ll_mask);
3922 type = rntype;
3923 }
3924 else if (lnbitsize < rnbitsize)
3925 {
3926 rhs = convert (lntype, rhs);
3927 lr_mask = convert (lntype, lr_mask);
3928 type = lntype;
3929 }
3930 }
3931
3932 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
3933 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
3934
3935 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
3936 rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
3937
3938 return build (wanted_code, truth_type, lhs, rhs);
3939 }
3940
3941 return 0;
3942 }
3943
3944 /* Handle the case of comparisons with constants. If there is something in
3945 common between the masks, those bits of the constants must be the same.
3946 If not, the condition is always false. Test for this to avoid generating
3947 incorrect code below. */
3948 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
3949 if (! integer_zerop (result)
3950 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
3951 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
3952 {
3953 if (wanted_code == NE_EXPR)
3954 {
3955 warning ("`or' of unmatched not-equal tests is always 1");
3956 return convert (truth_type, integer_one_node);
3957 }
3958 else
3959 {
3960 warning ("`and' of mutually exclusive equal-tests is always 0");
3961 return convert (truth_type, integer_zero_node);
3962 }
3963 }
3964
3965 /* Construct the expression we will return. First get the component
3966 reference we will make. Unless the mask is all ones the width of
3967 that field, perform the mask operation. Then compare with the
3968 merged constant. */
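  /* For instance (illustrative): for adjacent 8-bit fields a and b,
     p->a == 2 && p->b == 4 becomes a single 16-bit reference that is
     masked and compared against the constant merging 2 and 4.  */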
3969 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
3970 ll_unsignedp || rl_unsignedp);
3971
3972 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3973 if (! all_ones_mask_p (ll_mask, lnbitsize))
3974 result = build (BIT_AND_EXPR, lntype, result, ll_mask);
3975
3976 return build (wanted_code, truth_type, result,
3977 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
3978 }
3979 \f
3980 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
3981 constant. */
3982
3983 static tree
3984 optimize_minmax_comparison (t)
3985 tree t;
3986 {
3987 tree type = TREE_TYPE (t);
3988 tree arg0 = TREE_OPERAND (t, 0);
3989 enum tree_code op_code;
3990 tree comp_const = TREE_OPERAND (t, 1);
3991 tree minmax_const;
3992 int consts_equal, consts_lt;
3993 tree inner;
3994
3995 STRIP_SIGN_NOPS (arg0);
3996
3997 op_code = TREE_CODE (arg0);
3998 minmax_const = TREE_OPERAND (arg0, 1);
3999 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4000 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4001 inner = TREE_OPERAND (arg0, 0);
4002
4003 /* If something does not permit us to optimize, return the original tree. */
4004 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4005 || TREE_CODE (comp_const) != INTEGER_CST
4006 || TREE_CONSTANT_OVERFLOW (comp_const)
4007 || TREE_CODE (minmax_const) != INTEGER_CST
4008 || TREE_CONSTANT_OVERFLOW (minmax_const))
4009 return t;
4010
4011 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4012 and GT_EXPR, doing the rest with recursive calls using logical
4013 simplifications. */
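  /* For instance (illustrative): MIN (X, 4) < 10 is folded as
     ! (MIN (X, 4) >= 10), and the GE_EXPR case is in turn split into
     an EQ_EXPR and a GT_EXPR test joined by ||.  */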
4014 switch (TREE_CODE (t))
4015 {
4016 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4017 return
4018 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4019
4020 case GE_EXPR:
4021 return
4022 fold (build (TRUTH_ORIF_EXPR, type,
4023 optimize_minmax_comparison
4024 (build (EQ_EXPR, type, arg0, comp_const)),
4025 optimize_minmax_comparison
4026 (build (GT_EXPR, type, arg0, comp_const))));
4027
4028 case EQ_EXPR:
4029 if (op_code == MAX_EXPR && consts_equal)
4030 /* MAX (X, 0) == 0 -> X <= 0 */
4031 return fold (build (LE_EXPR, type, inner, comp_const));
4032
4033 else if (op_code == MAX_EXPR && consts_lt)
4034 /* MAX (X, 0) == 5 -> X == 5 */
4035 return fold (build (EQ_EXPR, type, inner, comp_const));
4036
4037 else if (op_code == MAX_EXPR)
4038 /* MAX (X, 0) == -1 -> false */
4039 return omit_one_operand (type, integer_zero_node, inner);
4040
4041 else if (consts_equal)
4042 /* MIN (X, 0) == 0 -> X >= 0 */
4043 return fold (build (GE_EXPR, type, inner, comp_const));
4044
4045 else if (consts_lt)
4046 /* MIN (X, 0) == 5 -> false */
4047 return omit_one_operand (type, integer_zero_node, inner);
4048
4049 else
4050 /* MIN (X, 0) == -1 -> X == -1 */
4051 return fold (build (EQ_EXPR, type, inner, comp_const));
4052
4053 case GT_EXPR:
4054 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4055 /* MAX (X, 0) > 0 -> X > 0
4056 MAX (X, 0) > 5 -> X > 5 */
4057 return fold (build (GT_EXPR, type, inner, comp_const));
4058
4059 else if (op_code == MAX_EXPR)
4060 /* MAX (X, 0) > -1 -> true */
4061 return omit_one_operand (type, integer_one_node, inner);
4062
4063 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4064 /* MIN (X, 0) > 0 -> false
4065 MIN (X, 0) > 5 -> false */
4066 return omit_one_operand (type, integer_zero_node, inner);
4067
4068 else
4069 /* MIN (X, 0) > -1 -> X > -1 */
4070 return fold (build (GT_EXPR, type, inner, comp_const));
4071
4072 default:
4073 return t;
4074 }
4075 }
4076 \f
4077 /* T is an integer expression that is being multiplied or divided by, or
4078 reduced modulo, a constant C (CODE says which operation and what kind
4079 of divide or modulus). See if we can eliminate that operation by folding it with
4080 other operations already in T. WIDE_TYPE, if non-null, is a type that
4081 should be used for the computation if wider than our type.
4082
4083 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4084 (X * 2) + (Y * 4). We must, however, be assured that either the original
4085 expression would not overflow or that overflow is undefined for the type
4086 in the language in question.
4087
4088 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4089 the machine has a multiply-accumulate insn or that this is part of an
4090 addressing calculation.
4091
4092 If we return a non-null expression, it is an equivalent form of the
4093 original computation, but need not be in the original type. */
4094
4095 static tree
4096 extract_muldiv (t, c, code, wide_type)
4097 tree t;
4098 tree c;
4099 enum tree_code code;
4100 tree wide_type;
4101 {
4102 /* To avoid exponential search depth, refuse to allow recursion past
4103 three levels. Beyond that (1) it's highly unlikely that we'll find
4104 something interesting and (2) we've probably processed it before
4105 when we built the inner expression. */
4106
4107 static int depth;
4108 tree ret;
4109
4110 if (depth > 3)
4111 return NULL;
4112
4113 depth++;
4114 ret = extract_muldiv_1 (t, c, code, wide_type);
4115 depth--;
4116
4117 return ret;
4118 }
4119
4120 static tree
4121 extract_muldiv_1 (t, c, code, wide_type)
4122 tree t;
4123 tree c;
4124 enum tree_code code;
4125 tree wide_type;
4126 {
4127 tree type = TREE_TYPE (t);
4128 enum tree_code tcode = TREE_CODE (t);
4129 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
4130 > GET_MODE_SIZE (TYPE_MODE (type)))
4131 ? wide_type : type);
4132 tree t1, t2;
4133 int same_p = tcode == code;
4134 tree op0 = NULL_TREE, op1 = NULL_TREE;
4135
4136 /* Don't deal with constants of zero here; they confuse the code below. */
4137 if (integer_zerop (c))
4138 return NULL_TREE;
4139
4140 if (TREE_CODE_CLASS (tcode) == '1')
4141 op0 = TREE_OPERAND (t, 0);
4142
4143 if (TREE_CODE_CLASS (tcode) == '2')
4144 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
4145
4146 /* Note that we need not handle conditional operations here since fold
4147 already handles those cases. So just do arithmetic here. */
4148 switch (tcode)
4149 {
4150 case INTEGER_CST:
4151 /* For a constant, we can always simplify if we are a multiply
4152 or (for divide and modulus) if it is a multiple of our constant. */
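      /* For instance (illustrative): with CODE == TRUNC_DIV_EXPR and
         C == 4, the constant 12 folds to 3, while 10 is left alone
         since 10 % 4 != 0.  */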
4153 if (code == MULT_EXPR
4154 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
4155 return const_binop (code, convert (ctype, t), convert (ctype, c), 0);
4156 break;
4157
4158 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
4159 /* If op0 is an expression ... */
4160 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
4161 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
4162 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
4163 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
4164 /* ... and is unsigned, and its type is smaller than ctype,
4165 then we cannot pass through this widening. */
4166 && ((TREE_UNSIGNED (TREE_TYPE (op0))
4167 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4168 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
4169 && (GET_MODE_SIZE (TYPE_MODE (ctype))
4170 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
4171 /* ... or its type is larger than ctype,
4172 then we cannot pass through this truncation. */
4173 || (GET_MODE_SIZE (TYPE_MODE (ctype))
4174 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
4175 /* ... or signedness changes for division or modulus,
4176 then we cannot pass through this conversion. */
4177 || (code != MULT_EXPR
4178 && (TREE_UNSIGNED (ctype)
4179 != TREE_UNSIGNED (TREE_TYPE (op0))))))
4180 break;
4181
4182 /* Pass the constant down and see if we can make a simplification. If
4183 we can, replace this expression with the inner simplification for
4184 possible later conversion to our type or some other type. */
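      /* For instance (illustrative): for (long) (x * 8) with int x and
         CODE == TRUNC_DIV_EXPR with C == 4, the constant is pushed into
         the multiplication, giving x * 2; the caller converts the
         result as needed.  */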
4185 if ((t2 = convert (TREE_TYPE (op0), c)) != 0
4186 && TREE_CODE (t2) == INTEGER_CST
4187 && ! TREE_CONSTANT_OVERFLOW (t2)
4188 && (0 != (t1 = extract_muldiv (op0, t2, code,
4189 code == MULT_EXPR
4190 ? ctype : NULL_TREE))))
4191 return t1;
4192 break;
4193
4194 case NEGATE_EXPR: case ABS_EXPR:
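      /* For instance (illustrative): -(X * 6) / 3 folds to -(X * 2).  */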
4195 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4196 return fold (build1 (tcode, ctype, convert (ctype, t1)));
4197 break;
4198
4199 case MIN_EXPR: case MAX_EXPR:
4200 /* If widening the type changes the signedness, then we can't perform
4201 this optimization as that changes the result. */
4202 if (TREE_UNSIGNED (ctype) != TREE_UNSIGNED (type))
4203 break;
4204
4205 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
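      /* With a negative constant the sense flips (illustrative):
         MIN (a, b) * -2 becomes MAX (a * -2, b * -2).  */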
4206 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
4207 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
4208 {
4209 if (tree_int_cst_sgn (c) < 0)
4210 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
4211
4212 return fold (build (tcode, ctype, convert (ctype, t1),
4213 convert (ctype, t2)));
4214 }
4215 break;
4216
4217 case WITH_RECORD_EXPR:
4218 if ((t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code, wide_type)) != 0)
4219 return build (WITH_RECORD_EXPR, TREE_TYPE (t1), t1,
4220 TREE_OPERAND (t, 1));
4221 break;
4222
4223 case SAVE_EXPR:
4224 /* If this has not been evaluated and the operand has no side effects,
4225 we can see if we can do something inside it and make a new one.
4226 Note that this test is overly conservative since we can do this
4227 if the only reason it had side effects is that it was another
4228 similar SAVE_EXPR, but that isn't worth bothering with. */
4229 if (SAVE_EXPR_RTL (t) == 0 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0))
4230 && 0 != (t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code,
4231 wide_type)))
4232 {
4233 t1 = save_expr (t1);
4234 if (SAVE_EXPR_PERSISTENT_P (t) && TREE_CODE (t1) == SAVE_EXPR)
4235 SAVE_EXPR_PERSISTENT_P (t1) = 1;
4236 if (is_pending_size (t))
4237 put_pending_size (t1);
4238 return t1;
4239 }
4240 break;
4241
4242 case LSHIFT_EXPR: case RSHIFT_EXPR:
4243 /* If the second operand is constant, this is a multiplication
4244 or floor division by a power of two, so we can treat it that
4245 way unless the multiplier or divisor overflows. */
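      /* For instance (illustrative): X << 3 is handled as X * 8, and
         X >> 2 as the floor division of X by 4, before recursing.  */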
4246 if (TREE_CODE (op1) == INTEGER_CST
4247 /* const_binop may not detect overflow correctly,
4248 so check for it explicitly here. */
4249 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
4250 && TREE_INT_CST_HIGH (op1) == 0
4251 && 0 != (t1 = convert (ctype,
4252 const_binop (LSHIFT_EXPR, size_one_node,
4253 op1, 0)))
4254 && ! TREE_OVERFLOW (t1))
4255 return extract_muldiv (build (tcode == LSHIFT_EXPR
4256 ? MULT_EXPR : FLOOR_DIV_EXPR,
4257 ctype, convert (ctype, op0), t1),
4258 c, code, wide_type);
4259 break;
4260
4261 case PLUS_EXPR: case MINUS_EXPR:
4262 /* See if we can eliminate the operation on both sides. If we can, we
4263 can return a new PLUS or MINUS. If we can't, the only remaining
4264 case where we can do anything is when the second operand is a
4265 constant. */
4266 t1 = extract_muldiv (op0, c, code, wide_type);
4267 t2 = extract_muldiv (op1, c, code, wide_type);
4268 if (t1 != 0 && t2 != 0
4269 && (code == MULT_EXPR
4270 /* If not multiplication, we can only do this if both operands
4271 are divisible by c. */
4272 || (multiple_of_p (ctype, op0, c)
4273 && multiple_of_p (ctype, op1, c))))
4274 return fold (build (tcode, ctype, convert (ctype, t1),
4275 convert (ctype, t2)));
4276
4277 /* If this was a subtraction, negate OP1 and set it to be an addition.
4278 This simplifies the logic below. */
4279 if (tcode == MINUS_EXPR)
4280 tcode = PLUS_EXPR, op1 = negate_expr (op1);
4281
4282 if (TREE_CODE (op1) != INTEGER_CST)
4283 break;
4284
4285 /* If either OP1 or C is negative, this optimization is not safe for
4286 some of the division and remainder types while for others we need
4287 to change the code. */
4288 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
4289 {
4290 if (code == CEIL_DIV_EXPR)
4291 code = FLOOR_DIV_EXPR;
4292 else if (code == FLOOR_DIV_EXPR)
4293 code = CEIL_DIV_EXPR;
4294 else if (code != MULT_EXPR
4295 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
4296 break;
4297 }
4298
4299 /* If this is a multiply, or (for division and modulus) if OP1 is
4300 a multiple of our constant, do the operation and verify it doesn't overflow. */
4301 if (code == MULT_EXPR
4302 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4303 {
4304 op1 = const_binop (code, convert (ctype, op1), convert (ctype, c), 0);
4305 if (op1 == 0 || TREE_OVERFLOW (op1))
4306 break;
4307 }
4308 else
4309 break;
4310
4311 /* If we have an unsigned type that is not a sizetype, we cannot widen
4312 the operation since it will change the result if the original
4313 computation overflowed. */
4314 if (TREE_UNSIGNED (ctype)
4315 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
4316 && ctype != type)
4317 break;
4318
4319 /* If we were able to eliminate our operation from the first side,
4320 apply our operation to the second side and reform the PLUS. */
4321 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4322 return fold (build (tcode, ctype, convert (ctype, t1), op1));
4323
4324 /* The last case is when this is a multiply. In that case, we can
4325 apply the distributive law to commute the multiply and addition
4326 if the multiplication of the constants doesn't overflow. */
4327 if (code == MULT_EXPR)
4328 return fold (build (tcode, ctype, fold (build (code, ctype,
4329 convert (ctype, op0),
4330 convert (ctype, c))),
4331 op1));
4332
4333 break;
4334
4335 case MULT_EXPR:
4336 /* We have a special case here if we are doing something like
4337 (X * 8) % 4, since we know that's zero. */
4338 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4339 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4340 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4341 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4342 return omit_one_operand (type, integer_zero_node, op0);
4343
4344 /* ... fall through ... */
4345
4346 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
4347 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
4348 /* If we can extract our operation from the LHS, do so and return a
4349 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
4350 do something only if the second operand is a constant. */
4351 if (same_p
4352 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4353 return fold (build (tcode, ctype, convert (ctype, t1),
4354 convert (ctype, op1)));
4355 else if (tcode == MULT_EXPR && code == MULT_EXPR
4356 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4357 return fold (build (tcode, ctype, convert (ctype, op0),
4358 convert (ctype, t1)));
4359 else if (TREE_CODE (op1) != INTEGER_CST)
4360 return 0;
4361
4362 /* If these are the same operation types, we can associate them
4363 assuming no overflow. */
4364 if (tcode == code
4365 && 0 != (t1 = const_binop (MULT_EXPR, convert (ctype, op1),
4366 convert (ctype, c), 0))
4367 && ! TREE_OVERFLOW (t1))
4368 return fold (build (tcode, ctype, convert (ctype, op0), t1));
4369
4370 /* If these operations "cancel" each other, we have the main
4371 optimizations of this pass, which occur when either constant is a
4372 multiple of the other, in which case we replace this with an
4373 operation of either CODE or TCODE.
4374
4375 If we have an unsigned type that is not a sizetype, we cannot do
4376 this since it will change the result if the original computation
4377 overflowed. */
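      /* For instance (illustrative): (X * 8) / 4 becomes X * 2, while
         (X * 2) / 8 becomes X / 4.  */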
4378 if ((! TREE_UNSIGNED (ctype)
4379 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
4380 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4381 || (tcode == MULT_EXPR
4382 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4383 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
4384 {
4385 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4386 return fold (build (tcode, ctype, convert (ctype, op0),
4387 convert (ctype,
4388 const_binop (TRUNC_DIV_EXPR,
4389 op1, c, 0))));
4390 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4391 return fold (build (code, ctype, convert (ctype, op0),
4392 convert (ctype,
4393 const_binop (TRUNC_DIV_EXPR,
4394 c, op1, 0))));
4395 }
4396 break;
4397
4398 default:
4399 break;
4400 }
4401
4402 return 0;
4403 }
4404 \f
4405 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4406 S, a SAVE_EXPR, return the expression actually being evaluated. Note
4407 that we may sometimes modify the tree. */
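/* For instance (illustrative): `((void) S, X)' collapses to X when the
   first operand merely evaluates the SAVE_EXPR S.  */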
4408
4409 static tree
4410 strip_compound_expr (t, s)
4411 tree t;
4412 tree s;
4413 {
4414 enum tree_code code = TREE_CODE (t);
4415
4416 /* See if this is the COMPOUND_EXPR we want to eliminate. */
4417 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4418 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4419 return TREE_OPERAND (t, 1);
4420
4421 /* See if this is a COND_EXPR or a simple arithmetic operator. We
4422 don't bother handling any other types. */
4423 else if (code == COND_EXPR)
4424 {
4425 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4426 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4427 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
4428 }
4429 else if (TREE_CODE_CLASS (code) == '1')
4430 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4431 else if (TREE_CODE_CLASS (code) == '<'
4432 || TREE_CODE_CLASS (code) == '2')
4433 {
4434 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4435 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4436 }
4437
4438 return t;
4439 }
4440 \f
4441 /* Return a node which has the indicated constant VALUE (either 0 or
4442 1), and is of the indicated TYPE. */
4443
4444 static tree
4445 constant_boolean_node (value, type)
4446 int value;
4447 tree type;
4448 {
4449 if (type == integer_type_node)
4450 return value ? integer_one_node : integer_zero_node;
4451 else if (TREE_CODE (type) == BOOLEAN_TYPE)
4452 return (*lang_hooks.truthvalue_conversion) (value ? integer_one_node :
4453 integer_zero_node);
4454 else
4455 {
4456 tree t = build_int_2 (value, 0);
4457
4458 TREE_TYPE (t) = type;
4459 return t;
4460 }
4461 }
4462
4463 /* Utility function for the following routine, to see how complex a nesting of
4464 COND_EXPRs can be. EXPR is the expression and LIMIT is a count beyond which
4465 we don't care (to avoid spending too much time on complex expressions). */
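/* For instance (illustrative): applied to `a ? b : (c ? d : e)' with
   LIM 25, the result is 2, one for each COND_EXPR.  */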
4466
4467 static int
4468 count_cond (expr, lim)
4469 tree expr;
4470 int lim;
4471 {
4472 int ctrue, cfalse;
4473
4474 if (TREE_CODE (expr) != COND_EXPR)
4475 return 0;
4476 else if (lim <= 0)
4477 return 0;
4478
4479 ctrue = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4480 cfalse = count_cond (TREE_OPERAND (expr, 2), lim - 1 - ctrue);
4481 return MIN (lim, 1 + ctrue + cfalse);
4482 }
4483
4484 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
4485 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
4486 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
4487 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
4488 COND is the first argument to CODE; otherwise (as in the example
4489 given here), it is the second argument. TYPE is the type of the
4490 original expression. */
4491
4492 static tree
4493 fold_binary_op_with_conditional_arg (code, type, cond, arg, cond_first_p)
4494 enum tree_code code;
4495 tree type;
4496 tree cond;
4497 tree arg;
4498 int cond_first_p;
4499 {
4500 tree test, true_value, false_value;
4501 tree lhs = NULL_TREE;
4502 tree rhs = NULL_TREE;
4503 /* In the end, we'll produce a COND_EXPR. Both arms of the
4504 conditional expression will be binary operations. The left-hand
4505 side of the expression to be executed if the condition is true
4506 will be pointed to by TRUE_LHS. Similarly, the right-hand side
4507 of the expression to be executed if the condition is true will be
4508 pointed to by TRUE_RHS. FALSE_LHS and FALSE_RHS are analogous --
4509 but apply to the expression to be executed if the conditional is
4510 false. */
4511 tree *true_lhs;
4512 tree *true_rhs;
4513 tree *false_lhs;
4514 tree *false_rhs;
4515 /* These are the codes to use for the left-hand side and right-hand
4516 side of the COND_EXPR. Normally, they are the same as CODE. */
4517 enum tree_code lhs_code = code;
4518 enum tree_code rhs_code = code;
4519 /* And these are the types of the expressions. */
4520 tree lhs_type = type;
4521 tree rhs_type = type;
4522 int save = 0;
4523
4524 if (cond_first_p)
4525 {
4526 true_rhs = false_rhs = &arg;
4527 true_lhs = &true_value;
4528 false_lhs = &false_value;
4529 }
4530 else
4531 {
4532 true_lhs = false_lhs = &arg;
4533 true_rhs = &true_value;
4534 false_rhs = &false_value;
4535 }
4536
4537 if (TREE_CODE (cond) == COND_EXPR)
4538 {
4539 test = TREE_OPERAND (cond, 0);
4540 true_value = TREE_OPERAND (cond, 1);
4541 false_value = TREE_OPERAND (cond, 2);
4542 /* If this operand throws an exception, then it does not make
4543 sense to try to perform a logical or arithmetic operation
4544 involving it. Instead of building `a + throw 3' for example,
4545 we simply build `a, throw 3'. */
4546 if (VOID_TYPE_P (TREE_TYPE (true_value)))
4547 {
4548 if (! cond_first_p)
4549 {
4550 lhs_code = COMPOUND_EXPR;
4551 lhs_type = void_type_node;
4552 }
4553 else
4554 lhs = true_value;
4555 }
4556 if (VOID_TYPE_P (TREE_TYPE (false_value)))
4557 {
4558 if (! cond_first_p)
4559 {
4560 rhs_code = COMPOUND_EXPR;
4561 rhs_type = void_type_node;
4562 }
4563 else
4564 rhs = false_value;
4565 }
4566 }
4567 else
4568 {
4569 tree testtype = TREE_TYPE (cond);
4570 test = cond;
4571 true_value = convert (testtype, integer_one_node);
4572 false_value = convert (testtype, integer_zero_node);
4573 }
4574
4575 /* If ARG is complex we want to make sure we only evaluate it once. Though
4576 this is only required if it is volatile, it might be more efficient even
4577 if it is not. However, if we succeed in folding one part to a constant,
4578 we do not need to make this SAVE_EXPR. Since we do this optimization
4579 primarily to see if we end up with a constant and this SAVE_EXPR
4580 interferes with later optimizations, suppressing it when we can is
4581 important.
4582
4583 If we are not in a function, we can't make a SAVE_EXPR, so don't try to
4584 do so. Don't try to see if the result is a constant if an arm is a
4585 COND_EXPR since we get exponential behavior in that case. */
4586
4587 if (saved_expr_p (arg))
4588 save = 1;
4589 else if (lhs == 0 && rhs == 0
4590 && !TREE_CONSTANT (arg)
4591 && (*lang_hooks.decls.global_bindings_p) () == 0
4592 && ((TREE_CODE (arg) != VAR_DECL && TREE_CODE (arg) != PARM_DECL)
4593 || TREE_SIDE_EFFECTS (arg)))
4594 {
4595 if (TREE_CODE (true_value) != COND_EXPR)
4596 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4597
4598 if (TREE_CODE (false_value) != COND_EXPR)
4599 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4600
4601 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4602 && (rhs == 0 || !TREE_CONSTANT (rhs)))
4603 {
4604 arg = save_expr (arg);
4605 lhs = rhs = 0;
4606 save = 1;
4607 }
4608 }
4609
4610 if (lhs == 0)
4611 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4612 if (rhs == 0)
4613 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4614
4615 test = fold (build (COND_EXPR, type, test, lhs, rhs));
4616
4617 if (save)
4618 return build (COMPOUND_EXPR, type,
4619 convert (void_type_node, arg),
4620 strip_compound_expr (test, arg));
4621 else
4622 return convert (type, test);
4623 }
4624
4625 \f
4626 /* Subroutine of fold() that checks for the addition of +/- 0.0.
4627
4628 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
4629 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
4630 ADDEND is the same as X.
4631
4632 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
4633 and finite. The problematic cases are when X is zero, and its mode
4634 has signed zeros. In the case of rounding towards -infinity,
4635 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
4636 modes, X + 0 is not the same as X because -0 + 0 is 0. */
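/* For instance (illustrative): X - 0.0 may fold to X unless
   sign-dependent rounding must be honored, while X + 0.0 cannot fold
   to X when signed zeros are honored, since (-0.0) + 0.0 is +0.0.  */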
4637
4638 static bool
4639 fold_real_zero_addition_p (type, addend, negate)
4640 tree type, addend;
4641 int negate;
4642 {
4643 if (!real_zerop (addend))
4644 return false;
4645
4646 /* Don't allow the fold with -fsignaling-nans. */
4647 if (HONOR_SNANS (TYPE_MODE (type)))
4648 return false;
4649
4650 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
4651 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
4652 return true;
4653
4654 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
4655 if (TREE_CODE (addend) == REAL_CST
4656 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
4657 negate = !negate;
4658
4659 /* The mode has signed zeros, and we have to honor their sign.
4660 In this situation, there is only one case we can return true for.
4661 X - 0 is the same as X unless rounding towards -infinity is
4662 supported. */
4663 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
4664 }
4665
4666 /* Subroutine of fold() that checks comparisons of built-in math
4667 functions against real constants.
4668
4669 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
4670 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
4671 is the type of the result and ARG0 and ARG1 are the operands of the
4672 comparison. ARG1 must be a TREE_REAL_CST.
4673
4674 The function returns the constant folded tree if a simplification
4675 can be made, and NULL_TREE otherwise. */
4676
4677 static tree
4678 fold_mathfn_compare (fcode, code, type, arg0, arg1)
4679 enum built_in_function fcode;
4680 enum tree_code code;
4681 tree type, arg0, arg1;
4682 {
4683 REAL_VALUE_TYPE c;
4684
4685 if (fcode == BUILT_IN_SQRT
4686 || fcode == BUILT_IN_SQRTF
4687 || fcode == BUILT_IN_SQRTL)
4688 {
4689 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
4690 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
4691
4692 c = TREE_REAL_CST (arg1);
4693 if (REAL_VALUE_NEGATIVE (c))
4694 {
4695 /* sqrt(x) ==, < or <= y is always false, if y is negative. */
4696 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
4697 return omit_one_operand (type,
4698 convert (type, integer_zero_node),
4699 arg);
4700
4701 /* sqrt(x) > y is always true, if y is negative and we
4702 don't care about NaNs, i.e. negative values of x. */
4703 if (code == NE_EXPR || !HONOR_NANS (mode))
4704 return omit_one_operand (type,
4705 convert (type, integer_one_node),
4706 arg);
4707
4708 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
4709 return fold (build (GE_EXPR, type, arg,
4710 build_real (TREE_TYPE (arg), dconst0)));
4711 }
4712 else if (code == GT_EXPR || code == GE_EXPR)
4713 {
4714 REAL_VALUE_TYPE c2;
4715
4716 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
4717 real_convert (&c2, mode, &c2);
4718
4719 if (REAL_VALUE_ISINF (c2))
4720 {
4721 /* sqrt(x) > y is x == +Inf, when y is very large. */
4722 if (HONOR_INFINITIES (mode))
4723 return fold (build (EQ_EXPR, type, arg,
4724 build_real (TREE_TYPE (arg), c2)));
4725
4726 /* sqrt(x) > y is always false, when y is very large
4727 and we don't care about infinities. */
4728 return omit_one_operand (type,
4729 convert (type, integer_zero_node),
4730 arg);
4731 }
4732
4733 /* sqrt(x) > c is the same as x > c*c. */
4734 return fold (build (code, type, arg,
4735 build_real (TREE_TYPE (arg), c2)));
4736 }
4737 else if (code == LT_EXPR || code == LE_EXPR)
4738 {
4739 REAL_VALUE_TYPE c2;
4740
4741 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
4742 real_convert (&c2, mode, &c2);
4743
4744 if (REAL_VALUE_ISINF (c2))
4745 {
4746 /* sqrt(x) < y is always true, when y is a very large
4747 value and we don't care about NaNs or Infinities. */
4748 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
4749 return omit_one_operand (type,
4750 convert (type, integer_one_node),
4751 arg);
4752
4753 /* sqrt(x) < y is x != +Inf when y is very large and we
4754 don't care about NaNs. */
4755 if (! HONOR_NANS (mode))
4756 return fold (build (NE_EXPR, type, arg,
4757 build_real (TREE_TYPE (arg), c2)));
4758
4759 /* sqrt(x) < y is x >= 0 when y is very large and we
4760 don't care about Infinities. */
4761 if (! HONOR_INFINITIES (mode))
4762 return fold (build (GE_EXPR, type, arg,
4763 build_real (TREE_TYPE (arg), dconst0)));
4764
4765 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
4766 if ((*lang_hooks.decls.global_bindings_p) () != 0
4767 || contains_placeholder_p (arg))
4768 return NULL_TREE;
4769
4770 arg = save_expr (arg);
4771 return fold (build (TRUTH_ANDIF_EXPR, type,
4772 fold (build (GE_EXPR, type, arg,
4773 build_real (TREE_TYPE (arg),
4774 dconst0))),
4775 fold (build (NE_EXPR, type, arg,
4776 build_real (TREE_TYPE (arg),
4777 c2)))));
4778 }
4779
4780 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
4781 if (! HONOR_NANS (mode))
4782 return fold (build (code, type, arg,
4783 build_real (TREE_TYPE (arg), c2)));
4784
4785 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
4786 if ((*lang_hooks.decls.global_bindings_p) () == 0
4787 && ! contains_placeholder_p (arg))
4788 {
4789 arg = save_expr (arg);
4790 return fold (build (TRUTH_ANDIF_EXPR, type,
4791 fold (build (GE_EXPR, type, arg,
4792 build_real (TREE_TYPE (arg),
4793 dconst0))),
4794 fold (build (code, type, arg,
4795 build_real (TREE_TYPE (arg),
4796 c2)))));
4797 }
4798 }
4799 }
4800
4801 return NULL_TREE;
4802 }
4803
4804 /* Subroutine of fold() that optimizes comparisons against Infinities,
4805 either +Inf or -Inf.
4806
4807 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
4808 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
4809 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
4810
4811 The function returns the constant folded tree if a simplification
4812 can be made, and NULL_TREE otherwise. */
4813
4814 static tree
4815 fold_inf_compare (code, type, arg0, arg1)
4816 enum tree_code code;
4817 tree type, arg0, arg1;
4818 {
4819 /* For negative infinity swap the sense of the comparison. */
4820 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
4821 code = swap_tree_comparison (code);
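  /* For instance (illustrative): x >= -Inf swaps to the LE_EXPR case
     below, and is always true when NaNs can be ignored.  */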
4822
4823 switch (code)
4824 {
4825 case GT_EXPR:
4826 /* x > +Inf is always false, if we ignore sNaNs. */
4827 if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
4828 return NULL_TREE;
4829 return omit_one_operand (type,
4830 convert (type, integer_zero_node),
4831 arg0);
4832
4833 case LE_EXPR:
4834 /* x <= +Inf is always true, if we don't care about NaNs. */
4835 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
4836 return omit_one_operand (type,
4837 convert (type, integer_one_node),
4838 arg0);
4839
4840 /* x <= +Inf is the same as x == x, i.e. ! isnan (x). */
4841 if ((*lang_hooks.decls.global_bindings_p) () == 0
4842 && ! contains_placeholder_p (arg0))
4843 {
4844 arg0 = save_expr (arg0);
4845 return fold (build (EQ_EXPR, type, arg0, arg0));
4846 }
4847 break;
4848
4849 case EQ_EXPR: /* ??? x == +Inf is x > DBL_MAX */
4850 case GE_EXPR: /* ??? x >= +Inf is x > DBL_MAX */
4851 case LT_EXPR: /* ??? x < +Inf is x <= DBL_MAX */
4852 case NE_EXPR: /* ??? x != +Inf is !(x > DBL_MAX) */
4853
4854 default:
4855 break;
4856 }
4857
4858 return NULL_TREE;
4859 }
4860
4861 /* Perform constant folding and related simplification of EXPR.
4862 The related simplifications include x*1 => x, x*0 => 0, etc.,
4863 and application of the associative law.
4864 NOP_EXPR conversions may be removed freely (as long as we
4865 are careful not to change the C type of the overall expression).
4866 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
4867 but we can constant-fold them if they have constant operands. */
4868
4869 tree
4870 fold (expr)
4871 tree expr;
4872 {
4873 tree t = expr;
4874 tree t1 = NULL_TREE;
4875 tree tem;
4876 tree type = TREE_TYPE (expr);
4877 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4878 enum tree_code code = TREE_CODE (t);
4879 int kind = TREE_CODE_CLASS (code);
4880 int invert;
4881 /* WINS will be nonzero when the switch is done
4882 if all operands are constant. */
4883 int wins = 1;
4884
4885 /* Don't try to process an RTL_EXPR since its operands aren't trees.
4886 Likewise for a SAVE_EXPR that's already been evaluated. */
4887 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
4888 return t;
4889
4890 /* Return right away if a constant. */
4891 if (kind == 'c')
4892 return t;
4893
4894 #ifdef MAX_INTEGER_COMPUTATION_MODE
4895 check_max_integer_computation_mode (expr);
4896 #endif
4897
4898 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
4899 {
4900 tree subop;
4901
4902 /* Special case for conversion ops that can have fixed point args. */
4903 arg0 = TREE_OPERAND (t, 0);
4904
4905 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
4906 if (arg0 != 0)
4907 STRIP_SIGN_NOPS (arg0);
4908
4909 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
4910 subop = TREE_REALPART (arg0);
4911 else
4912 subop = arg0;
4913
4914 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
4915 && TREE_CODE (subop) != REAL_CST
4916 )
4917 /* Note that TREE_CONSTANT isn't enough:
4918 static var addresses are constant but we can't
4919 do arithmetic on them. */
4920 wins = 0;
4921 }
4922 else if (IS_EXPR_CODE_CLASS (kind) || kind == 'r')
4923 {
4924 int len = first_rtl_op (code);
4925 int i;
4926 for (i = 0; i < len; i++)
4927 {
4928 tree op = TREE_OPERAND (t, i);
4929 tree subop;
4930
4931 if (op == 0)
4932 continue; /* Valid for CALL_EXPR, at least. */
4933
4934 if (kind == '<' || code == RSHIFT_EXPR)
4935 {
4936 /* Signedness matters here. Perhaps we can refine this
4937 later. */
4938 STRIP_SIGN_NOPS (op);
4939 }
4940 else
4941 /* Strip any conversions that don't change the mode. */
4942 STRIP_NOPS (op);
4943
4944 if (TREE_CODE (op) == COMPLEX_CST)
4945 subop = TREE_REALPART (op);
4946 else
4947 subop = op;
4948
4949 if (TREE_CODE (subop) != INTEGER_CST
4950 && TREE_CODE (subop) != REAL_CST)
4951 /* Note that TREE_CONSTANT isn't enough:
4952 static var addresses are constant but we can't
4953 do arithmetic on them. */
4954 wins = 0;
4955
4956 if (i == 0)
4957 arg0 = op;
4958 else if (i == 1)
4959 arg1 = op;
4960 }
4961 }
4962
4963 /* If this is a commutative operation, and ARG0 is a constant, move it
4964 to ARG1 to reduce the number of tests below. */
4965 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
4966 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
4967 || code == BIT_AND_EXPR)
4968 && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
4969 {
4970 tem = arg0; arg0 = arg1; arg1 = tem;
4971
4972 tem = TREE_OPERAND (t, 0); TREE_OPERAND (t, 0) = TREE_OPERAND (t, 1);
4973 TREE_OPERAND (t, 1) = tem;
4974 }
4975
4976 /* Now WINS is set as described above,
4977 ARG0 is the first operand of EXPR,
4978 and ARG1 is the second operand (if it has more than one operand).
4979
4980 First check for cases where an arithmetic operation is applied to a
4981 compound, conditional, or comparison operation. Push the arithmetic
4982 operation inside the compound or conditional to see if any folding
4983 can then be done. Convert comparison to conditional for this purpose.
4984 The also optimizes non-constant cases that used to be done in
4985 expand_expr.
4986
4987 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
4988 one of the operands is a comparison and the other is a comparison, a
4989 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
4990 code below would make the expression more complex. Change it to a
4991 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
4992 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
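  /* For instance (illustrative): (x < y) & (y < z) becomes
     (x < y) && (y < z), and (x < y) == (y < z) becomes the inversion
     of (x < y) ^ (y < z).  */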
4993
4994 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
4995 || code == EQ_EXPR || code == NE_EXPR)
4996 && ((truth_value_p (TREE_CODE (arg0))
4997 && (truth_value_p (TREE_CODE (arg1))
4998 || (TREE_CODE (arg1) == BIT_AND_EXPR
4999 && integer_onep (TREE_OPERAND (arg1, 1)))))
5000 || (truth_value_p (TREE_CODE (arg1))
5001 && (truth_value_p (TREE_CODE (arg0))
5002 || (TREE_CODE (arg0) == BIT_AND_EXPR
5003 && integer_onep (TREE_OPERAND (arg0, 1)))))))
5004 {
5005 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
5006 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
5007 : TRUTH_XOR_EXPR,
5008 type, arg0, arg1));
5009
5010 if (code == EQ_EXPR)
5011 t = invert_truthvalue (t);
5012
5013 return t;
5014 }
5015
5016 if (TREE_CODE_CLASS (code) == '1')
5017 {
5018 if (TREE_CODE (arg0) == COMPOUND_EXPR)
5019 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5020 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
5021 else if (TREE_CODE (arg0) == COND_EXPR)
5022 {
5023 tree arg01 = TREE_OPERAND (arg0, 1);
5024 tree arg02 = TREE_OPERAND (arg0, 2);
5025 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
5026 arg01 = fold (build1 (code, type, arg01));
5027 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
5028 arg02 = fold (build1 (code, type, arg02));
5029 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
5030 arg01, arg02));
5031
5032 /* If this was a conversion, and all we did was to move it
5033 inside the COND_EXPR, bring it back out. But leave it if
5034 it is a conversion from integer to integer and the
5035 result precision is no wider than a word since such a
5036 conversion is cheap and may be optimized away by combine,
5037 while it couldn't if it were outside the COND_EXPR. Then return
5038 so we don't get into an infinite recursion loop taking the
5039 conversion out and then back in. */
5040
5041 if ((code == NOP_EXPR || code == CONVERT_EXPR
5042 || code == NON_LVALUE_EXPR)
5043 && TREE_CODE (t) == COND_EXPR
5044 && TREE_CODE (TREE_OPERAND (t, 1)) == code
5045 && TREE_CODE (TREE_OPERAND (t, 2)) == code
5046 && ! VOID_TYPE_P (TREE_OPERAND (t, 1))
5047 && ! VOID_TYPE_P (TREE_OPERAND (t, 2))
5048 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
5049 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
5050 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
5051 && (INTEGRAL_TYPE_P
5052 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))))
5053 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
5054 t = build1 (code, type,
5055 build (COND_EXPR,
5056 TREE_TYPE (TREE_OPERAND
5057 (TREE_OPERAND (t, 1), 0)),
5058 TREE_OPERAND (t, 0),
5059 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
5060 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
5061 return t;
5062 }
5063 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
5064 return fold (build (COND_EXPR, type, arg0,
5065 fold (build1 (code, type, integer_one_node)),
5066 fold (build1 (code, type, integer_zero_node))));
5067 }
5068 else if (TREE_CODE_CLASS (code) == '<'
5069 && TREE_CODE (arg0) == COMPOUND_EXPR)
5070 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5071 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5072 else if (TREE_CODE_CLASS (code) == '<'
5073 && TREE_CODE (arg1) == COMPOUND_EXPR)
5074 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5075 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
5076 else if (TREE_CODE_CLASS (code) == '2'
5077 || TREE_CODE_CLASS (code) == '<')
5078 {
5079 if (TREE_CODE (arg1) == COMPOUND_EXPR
5080 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg1, 0))
5081 && ! TREE_SIDE_EFFECTS (arg0))
5082 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5083 fold (build (code, type,
5084 arg0, TREE_OPERAND (arg1, 1))));
5085 else if ((TREE_CODE (arg1) == COND_EXPR
5086 || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
5087 && TREE_CODE_CLASS (code) != '<'))
5088 && (TREE_CODE (arg0) != COND_EXPR
5089 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5090 && (! TREE_SIDE_EFFECTS (arg0)
5091 || ((*lang_hooks.decls.global_bindings_p) () == 0
5092 && ! contains_placeholder_p (arg0))))
5093 return
5094 fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
5095 /*cond_first_p=*/0);
5096 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
5097 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5098 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5099 else if ((TREE_CODE (arg0) == COND_EXPR
5100 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
5101 && TREE_CODE_CLASS (code) != '<'))
5102 && (TREE_CODE (arg1) != COND_EXPR
5103 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5104 && (! TREE_SIDE_EFFECTS (arg1)
5105 || ((*lang_hooks.decls.global_bindings_p) () == 0
5106 && ! contains_placeholder_p (arg1))))
5107 return
5108 fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
5109 /*cond_first_p=*/1);
5110 }
5111
5112 switch (code)
5113 {
5114 case INTEGER_CST:
5115 case REAL_CST:
5116 case VECTOR_CST:
5117 case STRING_CST:
5118 case COMPLEX_CST:
5119 case CONSTRUCTOR:
5120 return t;
5121
5122 case CONST_DECL:
5123 return fold (DECL_INITIAL (t));
5124
5125 case NOP_EXPR:
5126 case FLOAT_EXPR:
5127 case CONVERT_EXPR:
5128 case FIX_TRUNC_EXPR:
5129 /* Other kinds of FIX are not handled properly by fold_convert. */
5130
5131 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
5132 return TREE_OPERAND (t, 0);
5133
5134 /* Handle cases of two conversions in a row. */
5135 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
5136 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
5137 {
5138 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5139 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
5140 tree final_type = TREE_TYPE (t);
5141 int inside_int = INTEGRAL_TYPE_P (inside_type);
5142 int inside_ptr = POINTER_TYPE_P (inside_type);
5143 int inside_float = FLOAT_TYPE_P (inside_type);
5144 unsigned int inside_prec = TYPE_PRECISION (inside_type);
5145 int inside_unsignedp = TREE_UNSIGNED (inside_type);
5146 int inter_int = INTEGRAL_TYPE_P (inter_type);
5147 int inter_ptr = POINTER_TYPE_P (inter_type);
5148 int inter_float = FLOAT_TYPE_P (inter_type);
5149 unsigned int inter_prec = TYPE_PRECISION (inter_type);
5150 int inter_unsignedp = TREE_UNSIGNED (inter_type);
5151 int final_int = INTEGRAL_TYPE_P (final_type);
5152 int final_ptr = POINTER_TYPE_P (final_type);
5153 int final_float = FLOAT_TYPE_P (final_type);
5154 unsigned int final_prec = TYPE_PRECISION (final_type);
5155 int final_unsignedp = TREE_UNSIGNED (final_type);
5156
5157 /* In addition to the cases of two conversions in a row
5158 handled below, if we are converting something to its own
5159 type via an object of identical or wider precision, neither
5160 conversion is needed. */
5161 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (final_type)
5162 && ((inter_int && final_int) || (inter_float && final_float))
5163 && inter_prec >= final_prec)
5164 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5165
5166 /* Likewise, if the intermediate and final types are either both
5167 float or both integer, we don't need the middle conversion if
5168 it is wider than the final type and doesn't change the signedness
5169 (for integers). Avoid this if the final type is a pointer
5170 since then we sometimes need the inner conversion. Likewise if
5171 the outer has a precision not equal to the size of its mode. */
5172 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
5173 || (inter_float && inside_float))
5174 && inter_prec >= inside_prec
5175 && (inter_float || inter_unsignedp == inside_unsignedp)
5176 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5177 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5178 && ! final_ptr)
5179 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5180
5181 /* If we have a sign-extension of a zero-extended value, we can
5182 replace that by a single zero-extension. */
5183 if (inside_int && inter_int && final_int
5184 && inside_prec < inter_prec && inter_prec < final_prec
5185 && inside_unsignedp && !inter_unsignedp)
5186 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5187
5188 /* Two conversions in a row are not needed unless:
5189 - some conversion is floating-point (overstrict for now), or
5190 - the intermediate type is narrower than both initial and
5191 final, or
5192 - the intermediate type and innermost type differ in signedness,
5193 and the outermost type is wider than the intermediate, or
5194 - the initial type is a pointer type and the precisions of the
5195 intermediate and final types differ, or
5196 - the final type is a pointer type and the precisions of the
5197 initial and intermediate types differ. */
5198 if (! inside_float && ! inter_float && ! final_float
5199 && (inter_prec > inside_prec || inter_prec > final_prec)
5200 && ! (inside_int && inter_int
5201 && inter_unsignedp != inside_unsignedp
5202 && inter_prec < final_prec)
5203 && ((inter_unsignedp && inter_prec > inside_prec)
5204 == (final_unsignedp && final_prec > inter_prec))
5205 && ! (inside_ptr && inter_prec != final_prec)
5206 && ! (final_ptr && inside_prec != inter_prec)
5207 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5208 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5209 && ! final_ptr)
5210 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5211 }
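/* Worked examples (illustrative; not part of the original sources):
   with 32-bit int and 64-bit long, (int) (long) i for an int I
   collapses to a direct conversion of I, because I is converted to
   its own type via a wider intermediate.  By contrast,
   (long) (short) i must keep both conversions, since the narrower
   intermediate type truncates.  */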
5212
5213 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
5214 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
5215 /* Detect assigning a bitfield. */
5216 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
5217 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
5218 {
5219 /* Don't leave an assignment inside a conversion
5220 unless assigning a bitfield. */
5221 tree prev = TREE_OPERAND (t, 0);
5222 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
5223 /* First do the assignment, then return converted constant. */
5224 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
5225 TREE_USED (t) = 1;
5226 return t;
5227 }
5228
5229 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
5230 constant (if x has signed type, the sign bit cannot be set
5231 in c). This folds extension into the BIT_AND_EXPR. */
5232 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
5233 && TREE_CODE (TREE_TYPE (t)) != BOOLEAN_TYPE
5234 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
5235 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
5236 {
5237 tree and = TREE_OPERAND (t, 0);
5238 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
5239 int change = 0;
5240
5241 if (TREE_UNSIGNED (TREE_TYPE (and))
5242 || (TYPE_PRECISION (TREE_TYPE (t))
5243 <= TYPE_PRECISION (TREE_TYPE (and))))
5244 change = 1;
5245 else if (TYPE_PRECISION (TREE_TYPE (and1))
5246 <= HOST_BITS_PER_WIDE_INT
5247 && host_integerp (and1, 1))
5248 {
5249 unsigned HOST_WIDE_INT cst;
5250
5251 cst = tree_low_cst (and1, 1);
5252 cst &= (HOST_WIDE_INT) -1
5253 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
5254 change = (cst == 0);
5255 #ifdef LOAD_EXTEND_OP
5256 if (change
5257 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
5258 == ZERO_EXTEND))
5259 {
5260 tree uns = (*lang_hooks.types.unsigned_type) (TREE_TYPE (and0));
5261 and0 = convert (uns, and0);
5262 and1 = convert (uns, and1);
5263 }
5264 #endif
5265 }
5266 if (change)
5267 return fold (build (BIT_AND_EXPR, TREE_TYPE (t),
5268 convert (TREE_TYPE (t), and0),
5269 convert (TREE_TYPE (t), and1)));
5270 }
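/* Illustrative example (not in the original sources): for an
   unsigned char X, (int) (X & 0x0f) folds to (int) X & 0x0f, moving
   the widening conversion inside the mask.  For a narrower signed
   operand the fold is only done when the constant's sign bit (and
   everything above it) is clear, as the cst test above verifies.  */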
5271
5272 if (!wins)
5273 {
5274 TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
5275 return t;
5276 }
5277 return fold_convert (t, arg0);
5278
5279 case VIEW_CONVERT_EXPR:
5280 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
5281 return build1 (VIEW_CONVERT_EXPR, type,
5282 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5283 return t;
5284
5285 case COMPONENT_REF:
5286 if (TREE_CODE (arg0) == CONSTRUCTOR)
5287 {
5288 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
5289 if (m)
5290 t = TREE_VALUE (m);
5291 }
5292 return t;
5293
5294 case RANGE_EXPR:
5295 TREE_CONSTANT (t) = wins;
5296 return t;
5297
5298 case NEGATE_EXPR:
5299 if (wins)
5300 {
5301 if (TREE_CODE (arg0) == INTEGER_CST)
5302 {
5303 unsigned HOST_WIDE_INT low;
5304 HOST_WIDE_INT high;
5305 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5306 TREE_INT_CST_HIGH (arg0),
5307 &low, &high);
5308 t = build_int_2 (low, high);
5309 TREE_TYPE (t) = type;
5310 TREE_OVERFLOW (t)
5311 = (TREE_OVERFLOW (arg0)
5312 | force_fit_type (t, overflow && !TREE_UNSIGNED (type)));
5313 TREE_CONSTANT_OVERFLOW (t)
5314 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5315 }
5316 else if (TREE_CODE (arg0) == REAL_CST)
5317 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5318 }
5319 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5320 return TREE_OPERAND (arg0, 0);
5321 /* Convert -((double)float) into (double)(-float). */
5322 else if (TREE_CODE (arg0) == NOP_EXPR
5323 && TREE_CODE (type) == REAL_TYPE)
5324 {
5325 tree targ0 = strip_float_extensions (arg0);
5326 if (targ0 != arg0)
5327 return convert (type, build1 (NEGATE_EXPR, TREE_TYPE (targ0), targ0));
5328
5329 }
5330
5331 /* Convert - (a - b) to (b - a) for non-floating-point. */
5332 else if (TREE_CODE (arg0) == MINUS_EXPR
5333 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
5334 return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
5335 TREE_OPERAND (arg0, 0));
5336
5337 return t;
5338
5339 case ABS_EXPR:
5340 if (wins)
5341 {
5342 if (TREE_CODE (arg0) == INTEGER_CST)
5343 {
5344 /* If the value is unsigned, then the absolute value is
5345 the same as the ordinary value. */
5346 if (TREE_UNSIGNED (type))
5347 return arg0;
5348 /* Similarly, if the value is non-negative. */
5349 else if (INT_CST_LT (integer_minus_one_node, arg0))
5350 return arg0;
5351 /* If the value is negative, then the absolute value is
5352 its negation. */
5353 else
5354 {
5355 unsigned HOST_WIDE_INT low;
5356 HOST_WIDE_INT high;
5357 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5358 TREE_INT_CST_HIGH (arg0),
5359 &low, &high);
5360 t = build_int_2 (low, high);
5361 TREE_TYPE (t) = type;
5362 TREE_OVERFLOW (t)
5363 = (TREE_OVERFLOW (arg0)
5364 | force_fit_type (t, overflow));
5365 TREE_CONSTANT_OVERFLOW (t)
5366 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5367 }
5368 }
5369 else if (TREE_CODE (arg0) == REAL_CST)
5370 {
5371 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
5372 t = build_real (type,
5373 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5374 }
5375 }
5376 else if (TREE_CODE (arg0) == ABS_EXPR || TREE_CODE (arg0) == NEGATE_EXPR)
5377 return build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
5378 /* Convert fabs((double)float) into (double)fabsf(float). */
5379 else if (TREE_CODE (arg0) == NOP_EXPR
5380 && TREE_CODE (type) == REAL_TYPE)
5381 {
5382 tree targ0 = strip_float_extensions (arg0);
5383 if (targ0 != arg0)
5384 return convert (type, build1 (ABS_EXPR, TREE_TYPE (targ0), targ0));
5385
5386 }
5387 else
5388 {
5389 /* fabs(sqrt(x)) = sqrt(x) and fabs(exp(x)) = exp(x). */
5390 enum built_in_function fcode = builtin_mathfn_code (arg0);
5391 if (fcode == BUILT_IN_SQRT
5392 || fcode == BUILT_IN_SQRTF
5393 || fcode == BUILT_IN_SQRTL
5394 || fcode == BUILT_IN_EXP
5395 || fcode == BUILT_IN_EXPF
5396 || fcode == BUILT_IN_EXPL)
5397 t = arg0;
5398 }
5399 return t;
5400
5401 case CONJ_EXPR:
5402 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5403 return convert (type, arg0);
5404 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5405 return build (COMPLEX_EXPR, type,
5406 TREE_OPERAND (arg0, 0),
5407 negate_expr (TREE_OPERAND (arg0, 1)));
5408 else if (TREE_CODE (arg0) == COMPLEX_CST)
5409 return build_complex (type, TREE_REALPART (arg0),
5410 negate_expr (TREE_IMAGPART (arg0)));
5411 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5412 return fold (build (TREE_CODE (arg0), type,
5413 fold (build1 (CONJ_EXPR, type,
5414 TREE_OPERAND (arg0, 0))),
5415 fold (build1 (CONJ_EXPR,
5416 type, TREE_OPERAND (arg0, 1)))));
5417 else if (TREE_CODE (arg0) == CONJ_EXPR)
5418 return TREE_OPERAND (arg0, 0);
5419 return t;
5420
5421 case BIT_NOT_EXPR:
5422 if (wins)
5423 {
5424 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
5425 ~ TREE_INT_CST_HIGH (arg0));
5426 TREE_TYPE (t) = type;
5427 force_fit_type (t, 0);
5428 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
5429 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
5430 }
5431 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
5432 return TREE_OPERAND (arg0, 0);
5433 return t;
5434
5435 case PLUS_EXPR:
5436 /* A + (-B) -> A - B */
5437 if (TREE_CODE (arg1) == NEGATE_EXPR)
5438 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5439 /* (-A) + B -> B - A */
5440 if (TREE_CODE (arg0) == NEGATE_EXPR)
5441 return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
5442 else if (! FLOAT_TYPE_P (type))
5443 {
5444 if (integer_zerop (arg1))
5445 return non_lvalue (convert (type, arg0));
5446
5447 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
5448 with a constant, and the two constants have no bits in common,
5449 we should treat this as a BIT_IOR_EXPR since this may produce more
5450 simplifications. */
5451 if (TREE_CODE (arg0) == BIT_AND_EXPR
5452 && TREE_CODE (arg1) == BIT_AND_EXPR
5453 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5454 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5455 && integer_zerop (const_binop (BIT_AND_EXPR,
5456 TREE_OPERAND (arg0, 1),
5457 TREE_OPERAND (arg1, 1), 0)))
5458 {
5459 code = BIT_IOR_EXPR;
5460 goto bit_ior;
5461 }
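/* Illustrative example (not in the original sources):
   (X & 0xf0) + (Y & 0x0f) has no overlapping constant bits, so the
   addition can never carry and is equivalent to
   (X & 0xf0) | (Y & 0x0f).  */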
5462
5463 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
5464 (plus (plus (mult) (mult)) (foo)) so that we can
5465 take advantage of the factoring cases below. */
5466 if ((TREE_CODE (arg0) == PLUS_EXPR
5467 && TREE_CODE (arg1) == MULT_EXPR)
5468 || (TREE_CODE (arg1) == PLUS_EXPR
5469 && TREE_CODE (arg0) == MULT_EXPR))
5470 {
5471 tree parg0, parg1, parg, marg;
5472
5473 if (TREE_CODE (arg0) == PLUS_EXPR)
5474 parg = arg0, marg = arg1;
5475 else
5476 parg = arg1, marg = arg0;
5477 parg0 = TREE_OPERAND (parg, 0);
5478 parg1 = TREE_OPERAND (parg, 1);
5479 STRIP_NOPS (parg0);
5480 STRIP_NOPS (parg1);
5481
5482 if (TREE_CODE (parg0) == MULT_EXPR
5483 && TREE_CODE (parg1) != MULT_EXPR)
5484 return fold (build (PLUS_EXPR, type,
5485 fold (build (PLUS_EXPR, type,
5486 convert (type, parg0),
5487 convert (type, marg))),
5488 convert (type, parg1)));
5489 if (TREE_CODE (parg0) != MULT_EXPR
5490 && TREE_CODE (parg1) == MULT_EXPR)
5491 return fold (build (PLUS_EXPR, type,
5492 fold (build (PLUS_EXPR, type,
5493 convert (type, parg1),
5494 convert (type, marg))),
5495 convert (type, parg0)));
5496 }
5497
5498 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
5499 {
5500 tree arg00, arg01, arg10, arg11;
5501 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
5502
5503 /* (A * C) + (B * C) -> (A+B) * C.
5504 We are most concerned about the case where C is a constant,
5505 but other combinations show up during loop reduction. Since
5506 it is not difficult, try all four possibilities. */
5507
5508 arg00 = TREE_OPERAND (arg0, 0);
5509 arg01 = TREE_OPERAND (arg0, 1);
5510 arg10 = TREE_OPERAND (arg1, 0);
5511 arg11 = TREE_OPERAND (arg1, 1);
5512 same = NULL_TREE;
5513
5514 if (operand_equal_p (arg01, arg11, 0))
5515 same = arg01, alt0 = arg00, alt1 = arg10;
5516 else if (operand_equal_p (arg00, arg10, 0))
5517 same = arg00, alt0 = arg01, alt1 = arg11;
5518 else if (operand_equal_p (arg00, arg11, 0))
5519 same = arg00, alt0 = arg01, alt1 = arg10;
5520 else if (operand_equal_p (arg01, arg10, 0))
5521 same = arg01, alt0 = arg00, alt1 = arg11;
5522
5523 /* No identical multiplicands; see if we can find a common
5524 power-of-two factor in non-power-of-two multiplies. This
5525 can help in multi-dimensional array access. */
5526 else if (TREE_CODE (arg01) == INTEGER_CST
5527 && TREE_CODE (arg11) == INTEGER_CST
5528 && TREE_INT_CST_HIGH (arg01) == 0
5529 && TREE_INT_CST_HIGH (arg11) == 0)
5530 {
5531 HOST_WIDE_INT int01, int11, tmp;
5532 int01 = TREE_INT_CST_LOW (arg01);
5533 int11 = TREE_INT_CST_LOW (arg11);
5534
5535 /* Move min of absolute values to int11. */
5536 if ((int01 >= 0 ? int01 : -int01)
5537 < (int11 >= 0 ? int11 : -int11))
5538 {
5539 tmp = int01, int01 = int11, int11 = tmp;
5540 alt0 = arg00, arg00 = arg10, arg10 = alt0;
5541 alt0 = arg01, arg01 = arg11, arg11 = alt0;
5542 }
5543
5544 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
5545 {
5546 alt0 = fold (build (MULT_EXPR, type, arg00,
5547 build_int_2 (int01 / int11, 0)));
5548 alt1 = arg10;
5549 same = arg11;
5550 }
5551 }
5552
5553 if (same)
5554 return fold (build (MULT_EXPR, type,
5555 fold (build (PLUS_EXPR, type, alt0, alt1)),
5556 same));
5557 }
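/* Illustrative examples (not in the original sources): A*C + B*C
   becomes (A + B) * C directly, and the power-of-two case rewrites
   A*12 + B*4 as (A*3 + B) * 4, since 4 is a power of two dividing
   12 exactly.  */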
5558 }
5559
5560 /* See if ARG1 is zero and X + ARG1 reduces to X. */
5561 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
5562 return non_lvalue (convert (type, arg0));
5563
5564 /* Likewise if the operands are reversed. */
5565 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
5566 return non_lvalue (convert (type, arg1));
5567
5568 bit_rotate:
5569 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
5570 is a rotate of A by C1 bits. */
5571 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
5572 is a rotate of A by B bits. */
5573 {
5574 enum tree_code code0, code1;
5575 code0 = TREE_CODE (arg0);
5576 code1 = TREE_CODE (arg1);
5577 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
5578 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
5579 && operand_equal_p (TREE_OPERAND (arg0, 0),
5580 TREE_OPERAND (arg1, 0), 0)
5581 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5582 {
5583 tree tree01, tree11;
5584 enum tree_code code01, code11;
5585
5586 tree01 = TREE_OPERAND (arg0, 1);
5587 tree11 = TREE_OPERAND (arg1, 1);
5588 STRIP_NOPS (tree01);
5589 STRIP_NOPS (tree11);
5590 code01 = TREE_CODE (tree01);
5591 code11 = TREE_CODE (tree11);
5592 if (code01 == INTEGER_CST
5593 && code11 == INTEGER_CST
5594 && TREE_INT_CST_HIGH (tree01) == 0
5595 && TREE_INT_CST_HIGH (tree11) == 0
5596 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
5597 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
5598 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
5599 code0 == LSHIFT_EXPR ? tree01 : tree11);
5600 else if (code11 == MINUS_EXPR)
5601 {
5602 tree tree110, tree111;
5603 tree110 = TREE_OPERAND (tree11, 0);
5604 tree111 = TREE_OPERAND (tree11, 1);
5605 STRIP_NOPS (tree110);
5606 STRIP_NOPS (tree111);
5607 if (TREE_CODE (tree110) == INTEGER_CST
5608 && 0 == compare_tree_int (tree110,
5609 TYPE_PRECISION
5610 (TREE_TYPE (TREE_OPERAND
5611 (arg0, 0))))
5612 && operand_equal_p (tree01, tree111, 0))
5613 return build ((code0 == LSHIFT_EXPR
5614 ? LROTATE_EXPR
5615 : RROTATE_EXPR),
5616 type, TREE_OPERAND (arg0, 0), tree01);
5617 }
5618 else if (code01 == MINUS_EXPR)
5619 {
5620 tree tree010, tree011;
5621 tree010 = TREE_OPERAND (tree01, 0);
5622 tree011 = TREE_OPERAND (tree01, 1);
5623 STRIP_NOPS (tree010);
5624 STRIP_NOPS (tree011);
5625 if (TREE_CODE (tree010) == INTEGER_CST
5626 && 0 == compare_tree_int (tree010,
5627 TYPE_PRECISION
5628 (TREE_TYPE (TREE_OPERAND
5629 (arg0, 0))))
5630 && operand_equal_p (tree11, tree011, 0))
5631 return build ((code0 != LSHIFT_EXPR
5632 ? LROTATE_EXPR
5633 : RROTATE_EXPR),
5634 type, TREE_OPERAND (arg0, 0), tree11);
5635 }
5636 }
5637 }
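/* Illustrative example (not in the original sources): for a 32-bit
   unsigned X, (X << 3) + (X >> 29) passes the constant check above
   (3 + 29 == 32) and becomes a rotate of X by 3; the MINUS_EXPR
   branches recognize the variable form (X << B) + (X >> (32 - B))
   the same way.  */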
5638
5639 associate:
5640 /* In most languages, we can't associate operations on floats through
5641 parentheses. Rather than remember where the parentheses were, we
5642 don't associate floats at all. It shouldn't matter much. However,
5643 associating multiplications is only very slightly inaccurate, so do
5644 that if -funsafe-math-optimizations is specified. */
5645
5646 if (! wins
5647 && (! FLOAT_TYPE_P (type)
5648 || (flag_unsafe_math_optimizations && code == MULT_EXPR)))
5649 {
5650 tree var0, con0, lit0, minus_lit0;
5651 tree var1, con1, lit1, minus_lit1;
5652
5653 /* Split both trees into variables, constants, and literals. Then
5654 associate each group together, the constants with literals,
5655 then the result with variables. This increases the chances of
5656 literals being recombined later and of generating relocatable
5657 expressions for the sum of a constant and literal. */
5658 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
5659 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
5660 code == MINUS_EXPR);
5661
5662 /* Only do something if we found more than two objects. Otherwise,
5663 nothing has changed and we risk infinite recursion. */
5664 if (2 < ((var0 != 0) + (var1 != 0)
5665 + (con0 != 0) + (con1 != 0)
5666 + (lit0 != 0) + (lit1 != 0)
5667 + (minus_lit0 != 0) + (minus_lit1 != 0)))
5668 {
5669 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
5670 if (code == MINUS_EXPR)
5671 code = PLUS_EXPR;
5672
5673 var0 = associate_trees (var0, var1, code, type);
5674 con0 = associate_trees (con0, con1, code, type);
5675 lit0 = associate_trees (lit0, lit1, code, type);
5676 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
5677
5678 /* Preserve the MINUS_EXPR if the negative part of the literal is
5679 greater than the positive part. Otherwise, the multiplicative
5680 folding code (i.e. extract_muldiv) may be fooled in case
5681 unsigned constants are subtracted, like in the following
5682 example: ((X*2 + 4) - 8U)/2. */
5683 if (minus_lit0 && lit0)
5684 {
5685 if (tree_int_cst_lt (lit0, minus_lit0))
5686 {
5687 minus_lit0 = associate_trees (minus_lit0, lit0,
5688 MINUS_EXPR, type);
5689 lit0 = 0;
5690 }
5691 else
5692 {
5693 lit0 = associate_trees (lit0, minus_lit0,
5694 MINUS_EXPR, type);
5695 minus_lit0 = 0;
5696 }
5697 }
5698 if (minus_lit0)
5699 {
5700 if (con0 == 0)
5701 return convert (type, associate_trees (var0, minus_lit0,
5702 MINUS_EXPR, type));
5703 else
5704 {
5705 con0 = associate_trees (con0, minus_lit0,
5706 MINUS_EXPR, type);
5707 return convert (type, associate_trees (var0, con0,
5708 PLUS_EXPR, type));
5709 }
5710 }
5711
5712 con0 = associate_trees (con0, lit0, code, type);
5713 return convert (type, associate_trees (var0, con0, code, type));
5714 }
5715 }
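/* Illustrative example (not in the original sources):
   (X + 5) + (Y + 7) splits into variables X, Y and literals 5, 7;
   the literals are recombined first, yielding (X + Y) + 12 and a
   better chance of later constant folding.  */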
5716
5717 binary:
5718 if (wins)
5719 t1 = const_binop (code, arg0, arg1, 0);
5720 if (t1 != NULL_TREE)
5721 {
5722 /* The return value should always have
5723 the same type as the original expression. */
5724 if (TREE_TYPE (t1) != TREE_TYPE (t))
5725 t1 = convert (TREE_TYPE (t), t1);
5726
5727 return t1;
5728 }
5729 return t;
5730
5731 case MINUS_EXPR:
5732 /* A - (-B) -> A + B */
5733 if (TREE_CODE (arg1) == NEGATE_EXPR)
5734 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5735 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
5736 if (TREE_CODE (arg0) == NEGATE_EXPR
5737 && FLOAT_TYPE_P (type)
5738 && negate_expr_p (arg1)
5739 && (! TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
5740 && (! TREE_SIDE_EFFECTS (arg1) || TREE_CONSTANT (arg0)))
5741 return fold (build (MINUS_EXPR, type, negate_expr (arg1),
5742 TREE_OPERAND (arg0, 0)));
5743
5744 if (! FLOAT_TYPE_P (type))
5745 {
5746 if (! wins && integer_zerop (arg0))
5747 return negate_expr (convert (type, arg1));
5748 if (integer_zerop (arg1))
5749 return non_lvalue (convert (type, arg0));
5750
5751 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
5752 about the case where C is a constant, just try one of the
5753 four possibilities. */
5754
5755 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
5756 && operand_equal_p (TREE_OPERAND (arg0, 1),
5757 TREE_OPERAND (arg1, 1), 0))
5758 return fold (build (MULT_EXPR, type,
5759 fold (build (MINUS_EXPR, type,
5760 TREE_OPERAND (arg0, 0),
5761 TREE_OPERAND (arg1, 0))),
5762 TREE_OPERAND (arg0, 1)));
5763
5764 /* Fold A - (A & B) into ~B & A. */
5765 if (!TREE_SIDE_EFFECTS (arg0)
5766 && TREE_CODE (arg1) == BIT_AND_EXPR)
5767 {
5768 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
5769 return fold (build (BIT_AND_EXPR, type,
5770 fold (build1 (BIT_NOT_EXPR, type,
5771 TREE_OPERAND (arg1, 0))),
5772 arg0));
5773 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
5774 return fold (build (BIT_AND_EXPR, type,
5775 fold (build1 (BIT_NOT_EXPR, type,
5776 TREE_OPERAND (arg1, 1))),
5777 arg0));
5778 }
5779 }
5780
5781 /* See if ARG1 is zero and X - ARG1 reduces to X. */
5782 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
5783 return non_lvalue (convert (type, arg0));
5784
5785 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
5786 ARG0 is zero and X + ARG0 reduces to X, since that would mean
5787 (-ARG1 + ARG0) reduces to -ARG1. */
5788 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
5789 return negate_expr (convert (type, arg1));
5790
5791 /* Fold &x - &x. This can happen from &x.foo - &x.
5792 This is unsafe for certain floats even in non-IEEE formats.
5793 In IEEE, it is unsafe because it gives the wrong result for NaNs.
5794 Also note that operand_equal_p is always false if an operand
5795 is volatile. */
5796
5797 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
5798 && operand_equal_p (arg0, arg1, 0))
5799 return convert (type, integer_zero_node);
5800
5801 goto associate;
5802
5803 case MULT_EXPR:
5804 /* (-A) * (-B) -> A * B */
5805 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
5806 return fold (build (MULT_EXPR, type, TREE_OPERAND (arg0, 0),
5807 TREE_OPERAND (arg1, 0)));
5808
5809 if (! FLOAT_TYPE_P (type))
5810 {
5811 if (integer_zerop (arg1))
5812 return omit_one_operand (type, arg1, arg0);
5813 if (integer_onep (arg1))
5814 return non_lvalue (convert (type, arg0));
5815
5816 /* (a * (1 << b)) is (a << b) */
5817 if (TREE_CODE (arg1) == LSHIFT_EXPR
5818 && integer_onep (TREE_OPERAND (arg1, 0)))
5819 return fold (build (LSHIFT_EXPR, type, arg0,
5820 TREE_OPERAND (arg1, 1)));
5821 if (TREE_CODE (arg0) == LSHIFT_EXPR
5822 && integer_onep (TREE_OPERAND (arg0, 0)))
5823 return fold (build (LSHIFT_EXPR, type, arg1,
5824 TREE_OPERAND (arg0, 1)));
5825
5826 if (TREE_CODE (arg1) == INTEGER_CST
5827 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
5828 convert (type, arg1),
5829 code, NULL_TREE)))
5830 return convert (type, tem);
5831
5832 }
5833 else
5834 {
5835 /* Maybe fold x * 0 to 0. The expressions aren't the same
5836 when x is NaN, since x * 0 is also NaN. Nor are they the
5837 same in modes with signed zeros, since multiplying a
5838 negative value by 0 gives -0, not +0. */
5839 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
5840 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
5841 && real_zerop (arg1))
5842 return omit_one_operand (type, arg1, arg0);
5843 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
5844 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
5845 && real_onep (arg1))
5846 return non_lvalue (convert (type, arg0));
5847
5848 /* Transform x * -1.0 into -x. */
5849 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
5850 && real_minus_onep (arg1))
5851 return fold (build1 (NEGATE_EXPR, type, arg0));
5852
5853 /* x*2 is x+x */
5854 if (! wins && real_twop (arg1)
5855 && (*lang_hooks.decls.global_bindings_p) () == 0
5856 && ! contains_placeholder_p (arg0))
5857 {
5858 tree arg = save_expr (arg0);
5859 return fold (build (PLUS_EXPR, type, arg, arg));
5860 }
5861
5862 if (flag_unsafe_math_optimizations)
5863 {
5864 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
5865 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
5866
5867 /* Optimizations of sqrt(...)*sqrt(...). */
5868 if ((fcode0 == BUILT_IN_SQRT && fcode1 == BUILT_IN_SQRT)
5869 || (fcode0 == BUILT_IN_SQRTF && fcode1 == BUILT_IN_SQRTF)
5870 || (fcode0 == BUILT_IN_SQRTL && fcode1 == BUILT_IN_SQRTL))
5871 {
5872 tree sqrtfn, arg, arglist;
5873 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
5874 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
5875
5876 /* Optimize sqrt(x)*sqrt(x) as x. */
5877 if (operand_equal_p (arg00, arg10, 0)
5878 && ! HONOR_SNANS (TYPE_MODE (type)))
5879 return arg00;
5880
5881 /* Optimize sqrt(x)*sqrt(y) as sqrt(x*y). */
5882 sqrtfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
5883 arg = fold (build (MULT_EXPR, type, arg00, arg10));
5884 arglist = build_tree_list (NULL_TREE, arg);
5885 return build_function_call_expr (sqrtfn, arglist);
5886 }
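/* Illustrative examples (not in the original sources):
   sqrt (x) * sqrt (x) simplifies to x outright (when signaling NaNs
   need not be honored), and sqrt (x) * sqrt (y) becomes
   sqrt (x * y), saving one square root.  */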
5887
5888 /* Optimize exp(x)*exp(y) as exp(x+y). */
5889 if ((fcode0 == BUILT_IN_EXP && fcode1 == BUILT_IN_EXP)
5890 || (fcode0 == BUILT_IN_EXPF && fcode1 == BUILT_IN_EXPF)
5891 || (fcode0 == BUILT_IN_EXPL && fcode1 == BUILT_IN_EXPL))
5892 {
5893 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
5894 tree arg = build (PLUS_EXPR, type,
5895 TREE_VALUE (TREE_OPERAND (arg0, 1)),
5896 TREE_VALUE (TREE_OPERAND (arg1, 1)));
5897 tree arglist = build_tree_list (NULL_TREE, fold (arg));
5898 return build_function_call_expr (expfn, arglist);
5899 }
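/* Illustrative example (not in the original sources):
   exp (a) * exp (b) becomes exp (a + b), replacing one
   transcendental call with a single addition.  */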
5900
5901 /* Optimizations of pow(...)*pow(...). */
5902 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
5903 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
5904 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
5905 {
5906 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
5907 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
5908 1)));
5909 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
5910 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
5911 1)));
5912
5913 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
5914 if (operand_equal_p (arg01, arg11, 0))
5915 {
5916 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
5917 tree arg = build (MULT_EXPR, type, arg00, arg10);
5918 tree arglist = tree_cons (NULL_TREE, fold (arg),
5919 build_tree_list (NULL_TREE,
5920 arg01));
5921 return build_function_call_expr (powfn, arglist);
5922 }
5923
5924 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
5925 if (operand_equal_p (arg00, arg10, 0))
5926 {
5927 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
5928 tree arg = fold (build (PLUS_EXPR, type, arg01, arg11));
5929 tree arglist = tree_cons (NULL_TREE, arg00,
5930 build_tree_list (NULL_TREE,
5931 arg));
5932 return build_function_call_expr (powfn, arglist);
5933 }
5934 }
5935 }
5936 }
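/* Illustrative examples (not in the original sources):
   pow (x, 4.0) * pow (z, 4.0) becomes pow (x * z, 4.0), and
   pow (x, 2.0) * pow (x, 3.0) becomes pow (x, 5.0).  */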
5937 goto associate;
5938
5939 case BIT_IOR_EXPR:
5940 bit_ior:
5941 if (integer_all_onesp (arg1))
5942 return omit_one_operand (type, arg1, arg0);
5943 if (integer_zerop (arg1))
5944 return non_lvalue (convert (type, arg0));
5945 t1 = distribute_bit_expr (code, type, arg0, arg1);
5946 if (t1 != NULL_TREE)
5947 return t1;
5948
5949 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
5950
5951 This results in more efficient code for machines without a NAND
5952 instruction. Combine will canonicalize to the first form
5953 which will allow use of NAND instructions provided by the
5954 backend if they exist. */
5955 if (TREE_CODE (arg0) == BIT_NOT_EXPR
5956 && TREE_CODE (arg1) == BIT_NOT_EXPR)
5957 {
5958 return fold (build1 (BIT_NOT_EXPR, type,
5959 build (BIT_AND_EXPR, type,
5960 TREE_OPERAND (arg0, 0),
5961 TREE_OPERAND (arg1, 0))));
5962 }
5963
5964 /* See if this can be simplified into a rotate first. If that
5965 is unsuccessful continue in the association code. */
5966 goto bit_rotate;
5967
5968 case BIT_XOR_EXPR:
5969 if (integer_zerop (arg1))
5970 return non_lvalue (convert (type, arg0));
5971 if (integer_all_onesp (arg1))
5972 return fold (build1 (BIT_NOT_EXPR, type, arg0));
5973
5974 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
5975 with a constant, and the two constants have no bits in common,
5976 we should treat this as a BIT_IOR_EXPR since this may produce more
5977 simplifications. */
5978 if (TREE_CODE (arg0) == BIT_AND_EXPR
5979 && TREE_CODE (arg1) == BIT_AND_EXPR
5980 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5981 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5982 && integer_zerop (const_binop (BIT_AND_EXPR,
5983 TREE_OPERAND (arg0, 1),
5984 TREE_OPERAND (arg1, 1), 0)))
5985 {
5986 code = BIT_IOR_EXPR;
5987 goto bit_ior;
5988 }
5989
5990 /* See if this can be simplified into a rotate first. If that
5991 is unsuccessful continue in the association code. */
5992 goto bit_rotate;
5993
5994 case BIT_AND_EXPR:
5995 bit_and:
5996 if (integer_all_onesp (arg1))
5997 return non_lvalue (convert (type, arg0));
5998 if (integer_zerop (arg1))
5999 return omit_one_operand (type, arg1, arg0);
6000 t1 = distribute_bit_expr (code, type, arg0, arg1);
6001 if (t1 != NULL_TREE)
6002 return t1;
6003 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
6004 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
6005 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6006 {
6007 unsigned int prec
6008 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
6009
6010 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
6011 && (~TREE_INT_CST_LOW (arg1)
6012 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
6013 return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
6014 }
6015
6016 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
6017
6018 This results in more efficient code for machines without a NOR
6019 instruction. Combine will canonicalize to the first form
6020 which will allow use of NOR instructions provided by the
6021 backend if they exist. */
6022 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6023 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6024 {
6025 return fold (build1 (BIT_NOT_EXPR, type,
6026 build (BIT_IOR_EXPR, type,
6027 TREE_OPERAND (arg0, 0),
6028 TREE_OPERAND (arg1, 0))));
6029 }
6030
6031 goto associate;
6032
6033 case BIT_ANDTC_EXPR:
6034 if (integer_all_onesp (arg0))
6035 return non_lvalue (convert (type, arg1));
6036 if (integer_zerop (arg0))
6037 return omit_one_operand (type, arg0, arg1);
6038 if (TREE_CODE (arg1) == INTEGER_CST)
6039 {
6040 arg1 = fold (build1 (BIT_NOT_EXPR, type, arg1));
6041 code = BIT_AND_EXPR;
6042 goto bit_and;
6043 }
6044 goto binary;
6045
6046 case RDIV_EXPR:
6047 /* Don't touch a floating-point divide by zero unless the mode
6048 of the constant can represent infinity. */
6049 if (TREE_CODE (arg1) == REAL_CST
6050 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
6051 && real_zerop (arg1))
6052 return t;
6053
6054 /* (-A) / (-B) -> A / B */
6055 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
6056 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
6057 TREE_OPERAND (arg1, 0)));
6058
6059 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
6060 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6061 && real_onep (arg1))
6062 return non_lvalue (convert (type, arg0));
6063
6064 /* If ARG1 is a constant, we can convert this to a multiply by the
6065 reciprocal. This does not have the same rounding properties,
6066 so only do this if -funsafe-math-optimizations. We can actually
6067 always safely do it if ARG1 is a power of two, but it's hard to
6068 tell if it is or not in a portable manner. */
6069 if (TREE_CODE (arg1) == REAL_CST)
6070 {
6071 if (flag_unsafe_math_optimizations
6072 && 0 != (tem = const_binop (code, build_real (type, dconst1),
6073 arg1, 0)))
6074 return fold (build (MULT_EXPR, type, arg0, tem));
6075 /* Find the reciprocal if optimizing and the result is exact. */
6076 else if (optimize)
6077 {
6078 REAL_VALUE_TYPE r;
6079 r = TREE_REAL_CST (arg1);
6080 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
6081 {
6082 tem = build_real (type, r);
6083 return fold (build (MULT_EXPR, type, arg0, tem));
6084 }
6085 }
6086 }
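/* Illustrative example (not in the original sources): X / 4.0
   becomes X * 0.25 whenever optimizing, because the reciprocal of a
   power of two is exact; X / 3.0 is rewritten as a multiplication
   only under -funsafe-math-optimizations, since 1.0/3.0 rounds.  */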
6087 /* Convert A/B/C to A/(B*C). */
6088 if (flag_unsafe_math_optimizations
6089 && TREE_CODE (arg0) == RDIV_EXPR)
6090 {
6091 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
6092 build (MULT_EXPR, type, TREE_OPERAND (arg0, 1),
6093 arg1)));
6094 }
6095 /* Convert A/(B/C) to (A/B)*C. */
6096 if (flag_unsafe_math_optimizations
6097 && TREE_CODE (arg1) == RDIV_EXPR)
6098 {
6099 return fold (build (MULT_EXPR, type,
6100 build (RDIV_EXPR, type, arg0,
6101 TREE_OPERAND (arg1, 0)),
6102 TREE_OPERAND (arg1, 1)));
6103 }
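/* Illustrative examples (not in the original sources): A/B/C
   becomes A/(B*C) and A/(B/C) becomes (A/B)*C.  Both
   reassociations can change rounding, hence the
   flag_unsafe_math_optimizations guard.  */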
6104
6105 if (flag_unsafe_math_optimizations)
6106 {
6107 enum built_in_function fcode = builtin_mathfn_code (arg1);
6108 /* Optimize x/exp(y) into x*exp(-y). */
6109 if (fcode == BUILT_IN_EXP
6110 || fcode == BUILT_IN_EXPF
6111 || fcode == BUILT_IN_EXPL)
6112 {
6113 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6114 tree arg = build1 (NEGATE_EXPR, type,
6115 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6116 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6117 arg1 = build_function_call_expr (expfn, arglist);
6118 return fold (build (MULT_EXPR, type, arg0, arg1));
6119 }
6120
6121 /* Optimize x/pow(y,z) into x*pow(y,-z). */
6122 if (fcode == BUILT_IN_POW
6123 || fcode == BUILT_IN_POWF
6124 || fcode == BUILT_IN_POWL)
6125 {
6126 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6127 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6128 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
6129 tree neg11 = fold (build1 (NEGATE_EXPR, type, arg11));
6130 tree arglist = tree_cons (NULL_TREE, arg10,
6131 build_tree_list (NULL_TREE, neg11));
6132 arg1 = build_function_call_expr (powfn, arglist);
6133 return fold (build (MULT_EXPR, type, arg0, arg1));
6134 }
6135 }
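/* Illustrative examples (not in the original sources):
   X / exp (Y) becomes X * exp (-Y) and X / pow (Y, Z) becomes
   X * pow (Y, -Z), trading a division for a multiplication.  */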
6136 goto binary;
6137
6138 case TRUNC_DIV_EXPR:
6139 case ROUND_DIV_EXPR:
6140 case FLOOR_DIV_EXPR:
6141 case CEIL_DIV_EXPR:
6142 case EXACT_DIV_EXPR:
6143 if (integer_onep (arg1))
6144 return non_lvalue (convert (type, arg0));
6145 if (integer_zerop (arg1))
6146 return t;
6147
6148 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
6149 operation, EXACT_DIV_EXPR.
6150
6151 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
6152 At one time others generated faster code; it's not clear if they do
6153 after the last round of changes to the DIV code in expmed.c. */
6154 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
6155 && multiple_of_p (type, arg0, arg1))
6156 return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));
6157
6158 if (TREE_CODE (arg1) == INTEGER_CST
6159 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6160 code, NULL_TREE)))
6161 return convert (type, tem);
6162
6163 goto binary;
6164
6165 case CEIL_MOD_EXPR:
6166 case FLOOR_MOD_EXPR:
6167 case ROUND_MOD_EXPR:
6168 case TRUNC_MOD_EXPR:
6169 if (integer_onep (arg1))
6170 return omit_one_operand (type, integer_zero_node, arg0);
6171 if (integer_zerop (arg1))
6172 return t;
6173
6174 if (TREE_CODE (arg1) == INTEGER_CST
6175 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6176 code, NULL_TREE)))
6177 return convert (type, tem);
6178
6179 goto binary;
6180
6181 case LROTATE_EXPR:
6182 case RROTATE_EXPR:
6183 if (integer_all_onesp (arg0))
6184 return omit_one_operand (type, arg0, arg1);
6185 goto shift;
6186
6187 case RSHIFT_EXPR:
6188 /* Optimize -1 >> x for arithmetic right shifts. */
6189 if (integer_all_onesp (arg0) && ! TREE_UNSIGNED (type))
6190 return omit_one_operand (type, arg0, arg1);
6191 /* ... fall through ... */
6192
6193 case LSHIFT_EXPR:
6194 shift:
6195 if (integer_zerop (arg1))
6196 return non_lvalue (convert (type, arg0));
6197 if (integer_zerop (arg0))
6198 return omit_one_operand (type, arg0, arg1);
6199
6200 /* Since a negative shift count is not well-defined,
6201 don't try to compute it in the compiler. */
6202 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
6203 return t;
6204 /* Rewrite an LROTATE_EXPR by a constant into an
6205 RROTATE_EXPR by a new constant. */
6206 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
6207 {
6208 TREE_SET_CODE (t, RROTATE_EXPR);
6209 code = RROTATE_EXPR;
6210 TREE_OPERAND (t, 1) = arg1
6211 = const_binop
6212 (MINUS_EXPR,
6213 convert (TREE_TYPE (arg1),
6214 build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0)),
6215 arg1, 0);
6216 if (tree_int_cst_sgn (arg1) < 0)
6217 return t;
6218 }
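/* Illustrative example (not in the original sources): on a 32-bit
   type, a left rotate by 3 is rewritten as a right rotate by
   32 - 3 == 29, so the transformations below only have to handle
   the canonical RROTATE_EXPR form.  */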
6219
6220 /* If we have a rotate of a bit operation with the rotate count and
6221 the second operand of the bit operation both constant,
6222 permute the two operations. */
6223 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6224 && (TREE_CODE (arg0) == BIT_AND_EXPR
6225 || TREE_CODE (arg0) == BIT_ANDTC_EXPR
6226 || TREE_CODE (arg0) == BIT_IOR_EXPR
6227 || TREE_CODE (arg0) == BIT_XOR_EXPR)
6228 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
6229 return fold (build (TREE_CODE (arg0), type,
6230 fold (build (code, type,
6231 TREE_OPERAND (arg0, 0), arg1)),
6232 fold (build (code, type,
6233 TREE_OPERAND (arg0, 1), arg1))));
6234
6235 /* Two consecutive rotates adding up to the width of the mode can
6236 be ignored. */
6237 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6238 && TREE_CODE (arg0) == RROTATE_EXPR
6239 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6240 && TREE_INT_CST_HIGH (arg1) == 0
6241 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
6242 && ((TREE_INT_CST_LOW (arg1)
6243 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
6244 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
6245 return TREE_OPERAND (arg0, 0);
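/* Illustrative example (not in the original sources): on a 32-bit
   type, rotating right by 10 and then by another 22 rotates by a
   full 32 bits, so the combined expression is just the original
   operand.  */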
6246
6247 goto binary;
6248
6249 case MIN_EXPR:
6250 if (operand_equal_p (arg0, arg1, 0))
6251 return omit_one_operand (type, arg0, arg1);
6252 if (INTEGRAL_TYPE_P (type)
6253 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
6254 return omit_one_operand (type, arg1, arg0);
6255 goto associate;
6256
6257 case MAX_EXPR:
6258 if (operand_equal_p (arg0, arg1, 0))
6259 return omit_one_operand (type, arg0, arg1);
6260 if (INTEGRAL_TYPE_P (type)
6261 && TYPE_MAX_VALUE (type)
6262 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
6263 return omit_one_operand (type, arg1, arg0);
6264 goto associate;
6265
6266 case TRUTH_NOT_EXPR:
6267 /* Note that the operand of this must be an int
6268 and its values must be 0 or 1.
6269 ("true" is a fixed value perhaps depending on the language,
6270 but we don't handle values other than 1 correctly yet.) */
6271 tem = invert_truthvalue (arg0);
6272 /* Avoid infinite recursion. */
6273 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
6274 return t;
6275 return convert (type, tem);
6276
6277 case TRUTH_ANDIF_EXPR:
6278 /* Note that the operands of this must be ints
6279 and their values must be 0 or 1.
6280 ("true" is a fixed value perhaps depending on the language.) */
6281 /* If first arg is constant zero, return it. */
6282 if (integer_zerop (arg0))
6283 return convert (type, arg0);
6284 case TRUTH_AND_EXPR:
6285 /* If either arg is constant true, drop it. */
6286 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6287 return non_lvalue (convert (type, arg1));
6288 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
6289 /* Preserve sequence points. */
6290 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
6291 return non_lvalue (convert (type, arg0));
6292 /* If second arg is constant zero, result is zero, but first arg
6293 must be evaluated. */
6294 if (integer_zerop (arg1))
6295 return omit_one_operand (type, arg1, arg0);
6296 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
6297 case will be handled here. */
6298 if (integer_zerop (arg0))
6299 return omit_one_operand (type, arg0, arg1);
6300
6301 truth_andor:
6302 /* We only do these simplifications if we are optimizing. */
6303 if (!optimize)
6304 return t;
6305
6306 /* Check for things like (A || B) && (A || C). We can convert this
6307 to A || (B && C). Note that either operator can be any of the four
6308 truth and/or operations and the transformation will still be
6309 valid. Also note that we only care about order for the
6310 ANDIF and ORIF operators. If B contains side effects, this
6311 might change the truth-value of A. */
6312 if (TREE_CODE (arg0) == TREE_CODE (arg1)
6313 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
6314 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
6315 || TREE_CODE (arg0) == TRUTH_AND_EXPR
6316 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
6317 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
6318 {
6319 tree a00 = TREE_OPERAND (arg0, 0);
6320 tree a01 = TREE_OPERAND (arg0, 1);
6321 tree a10 = TREE_OPERAND (arg1, 0);
6322 tree a11 = TREE_OPERAND (arg1, 1);
6323 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
6324 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
6325 && (code == TRUTH_AND_EXPR
6326 || code == TRUTH_OR_EXPR));
6327
6328 if (operand_equal_p (a00, a10, 0))
6329 return fold (build (TREE_CODE (arg0), type, a00,
6330 fold (build (code, type, a01, a11))));
6331 else if (commutative && operand_equal_p (a00, a11, 0))
6332 return fold (build (TREE_CODE (arg0), type, a00,
6333 fold (build (code, type, a01, a10))));
6334 else if (commutative && operand_equal_p (a01, a10, 0))
6335 return fold (build (TREE_CODE (arg0), type, a01,
6336 fold (build (code, type, a00, a11))));
6337
6338 /* This case is tricky because we must either have commutative
6339 operators or else A10 must not have side-effects. */
6340
6341 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
6342 && operand_equal_p (a01, a11, 0))
6343 return fold (build (TREE_CODE (arg0), type,
6344 fold (build (code, type, a00, a10)),
6345 a01));
6346 }
6347
6348 /* See if we can build a range comparison. */
6349 if (0 != (tem = fold_range_test (t)))
6350 return tem;
6351
6352 /* Check for the possibility of merging component references. If our
6353 lhs is another similar operation, try to merge its rhs with our
6354 rhs. Then try to merge our lhs and rhs. */
6355 if (TREE_CODE (arg0) == code
6356 && 0 != (tem = fold_truthop (code, type,
6357 TREE_OPERAND (arg0, 1), arg1)))
6358 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6359
6360 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
6361 return tem;
6362
6363 return t;
6364
6365 case TRUTH_ORIF_EXPR:
6366 /* Note that the operands of this must be ints
6367 and their values must be 0 or true.
6368 ("true" is a fixed value perhaps depending on the language.) */
6369 /* If first arg is constant true, return it. */
6370 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6371 return convert (type, arg0);
6372 case TRUTH_OR_EXPR:
6373 /* If either arg is constant zero, drop it. */
6374 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
6375 return non_lvalue (convert (type, arg1));
6376 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
6377 /* Preserve sequence points. */
6378 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
6379 return non_lvalue (convert (type, arg0));
6380 /* If second arg is constant true, result is true, but we must
6381 evaluate first arg. */
6382 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
6383 return omit_one_operand (type, arg1, arg0);
6384 /* Likewise for first arg, but note this only occurs here for
6385 TRUTH_OR_EXPR. */
6386 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6387 return omit_one_operand (type, arg0, arg1);
6388 goto truth_andor;
6389
6390 case TRUTH_XOR_EXPR:
6391 /* If either arg is constant zero, drop it. */
6392 if (integer_zerop (arg0))
6393 return non_lvalue (convert (type, arg1));
6394 if (integer_zerop (arg1))
6395 return non_lvalue (convert (type, arg0));
6396 /* If either arg is constant true, this is a logical inversion. */
6397 if (integer_onep (arg0))
6398 return non_lvalue (convert (type, invert_truthvalue (arg1)));
6399 if (integer_onep (arg1))
6400 return non_lvalue (convert (type, invert_truthvalue (arg0)));
6401 return t;
6402
6403 case EQ_EXPR:
6404 case NE_EXPR:
6405 case LT_EXPR:
6406 case GT_EXPR:
6407 case LE_EXPR:
6408 case GE_EXPR:
6409 /* If one arg is a real or integer constant, put it last. */
6410 if ((TREE_CODE (arg0) == INTEGER_CST
6411 && TREE_CODE (arg1) != INTEGER_CST)
6412 || (TREE_CODE (arg0) == REAL_CST
6413 && TREE_CODE (arg1) != REAL_CST))
6414 {
6415 TREE_OPERAND (t, 0) = arg1;
6416 TREE_OPERAND (t, 1) = arg0;
6417 arg0 = TREE_OPERAND (t, 0);
6418 arg1 = TREE_OPERAND (t, 1);
6419 code = swap_tree_comparison (code);
6420 TREE_SET_CODE (t, code);
6421 }
6422
6423 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
6424 {
6425 tree targ0 = strip_float_extensions (arg0);
6426 tree targ1 = strip_float_extensions (arg1);
6427 tree newtype = TREE_TYPE (targ0);
6428
6429 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
6430 newtype = TREE_TYPE (targ1);
6431
6432 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
6433 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
6434 return fold (build (code, type, convert (newtype, targ0),
6435 convert (newtype, targ1)));
6436
6437 /* (-a) CMP (-b) -> b CMP a */
6438 if (TREE_CODE (arg0) == NEGATE_EXPR
6439 && TREE_CODE (arg1) == NEGATE_EXPR)
6440 return fold (build (code, type, TREE_OPERAND (arg1, 0),
6441 TREE_OPERAND (arg0, 0)));
6442
6443 if (TREE_CODE (arg1) == REAL_CST)
6444 {
6445 REAL_VALUE_TYPE cst;
6446 cst = TREE_REAL_CST (arg1);
6447
6448 /* (-a) CMP CST -> a swap(CMP) (-CST) */
6449 if (TREE_CODE (arg0) == NEGATE_EXPR)
6450 return
6451 fold (build (swap_tree_comparison (code), type,
6452 TREE_OPERAND (arg0, 0),
6453 build_real (TREE_TYPE (arg1),
6454 REAL_VALUE_NEGATE (cst))));
6455
6456 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
6457 /* a CMP (-0) -> a CMP 0 */
6458 if (REAL_VALUE_MINUS_ZERO (cst))
6459 return fold (build (code, type, arg0,
6460 build_real (TREE_TYPE (arg1), dconst0)));
6461
6462 /* x != NaN is always true, other ops are always false. */
6463 if (REAL_VALUE_ISNAN (cst)
6464 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
6465 {
6466 t = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
6467 return omit_one_operand (type, convert (type, t), arg0);
6468 }
6469
6470 /* Fold comparisons against infinity. */
6471 if (REAL_VALUE_ISINF (cst))
6472 {
6473 tem = fold_inf_compare (code, type, arg0, arg1);
6474 if (tem != NULL_TREE)
6475 return tem;
6476 }
6477 }
6478
6479 /* If this is a comparison of a real constant with a PLUS_EXPR
6480 or a MINUS_EXPR of a real constant, we can convert it into a
6481 comparison with a revised real constant as long as no overflow
6482 occurs when unsafe_math_optimizations are enabled. */
6483 if (flag_unsafe_math_optimizations
6484 && TREE_CODE (arg1) == REAL_CST
6485 && (TREE_CODE (arg0) == PLUS_EXPR
6486 || TREE_CODE (arg0) == MINUS_EXPR)
6487 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6488 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
6489 ? MINUS_EXPR : PLUS_EXPR,
6490 arg1, TREE_OPERAND (arg0, 1), 0))
6491 && ! TREE_CONSTANT_OVERFLOW (tem))
6492 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6493
6494 /* Likewise, we can simplify a comparison of a real constant with
6495 a MINUS_EXPR whose first operand is also a real constant, i.e.
6496 (c1 - x) < c2 becomes x > c1-c2. */
6497 if (flag_unsafe_math_optimizations
6498 && TREE_CODE (arg1) == REAL_CST
6499 && TREE_CODE (arg0) == MINUS_EXPR
6500 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
6501 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
6502 arg1, 0))
6503 && ! TREE_CONSTANT_OVERFLOW (tem))
6504 return fold (build (swap_tree_comparison (code), type,
6505 TREE_OPERAND (arg0, 1), tem));
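/* Illustrative examples (not in the original sources): under
   -funsafe-math-optimizations, X + 2.0 < 5.0 becomes X < 3.0, and
   5.0 - X < 2.0 becomes X > 3.0, provided the adjusted constant
   does not overflow.  */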
6506
6507 /* Fold comparisons against built-in math functions. */
6508 if (TREE_CODE (arg1) == REAL_CST
6509 && flag_unsafe_math_optimizations
6510 && ! flag_errno_math)
6511 {
6512 enum built_in_function fcode = builtin_mathfn_code (arg0);
6513
6514 if (fcode != END_BUILTINS)
6515 {
6516 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
6517 if (tem != NULL_TREE)
6518 return tem;
6519 }
6520 }
6521 }
6522
6523 /* Convert foo++ == CONST into ++foo == CONST + INCR.
6524 First, see if one arg is constant; find the constant arg
6525 and the other one. */
6526 {
6527 tree constop = 0, varop = NULL_TREE;
6528 int constopnum = -1;
6529
6530 if (TREE_CONSTANT (arg1))
6531 constopnum = 1, constop = arg1, varop = arg0;
6532 if (TREE_CONSTANT (arg0))
6533 constopnum = 0, constop = arg0, varop = arg1;
6534
6535 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
6536 {
6537 /* This optimization is invalid for ordered comparisons
6538 if CONST+INCR overflows or if foo+incr might overflow.
6539 This optimization is invalid for floating point due to rounding.
6540 For pointer types we assume overflow doesn't happen. */
6541 if (POINTER_TYPE_P (TREE_TYPE (varop))
6542 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
6543 && (code == EQ_EXPR || code == NE_EXPR)))
6544 {
6545 tree newconst
6546 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
6547 constop, TREE_OPERAND (varop, 1)));
6548
6549 /* Do not overwrite the current varop to be a preincrement,
6550 create a new node so that we won't confuse our caller who
6551 might create trees and throw them away, reusing the
6552 arguments that they passed to build. This shows up in
6553 the THEN or ELSE parts of ?: being postincrements. */
6554 varop = build (PREINCREMENT_EXPR, TREE_TYPE (varop),
6555 TREE_OPERAND (varop, 0),
6556 TREE_OPERAND (varop, 1));
6557
6558 /* If VAROP is a reference to a bitfield, we must mask
6559 the constant by the width of the field. */
6560 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
6561 && DECL_BIT_FIELD (TREE_OPERAND
6562 (TREE_OPERAND (varop, 0), 1)))
6563 {
6564 int size
6565 = TREE_INT_CST_LOW (DECL_SIZE
6566 (TREE_OPERAND
6567 (TREE_OPERAND (varop, 0), 1)));
6568 tree mask, unsigned_type;
6569 unsigned int precision;
6570 tree folded_compare;
6571
6572 /* First check whether the comparison would come out
6573 always the same. If we don't do that, we would
6574 change the meaning with the masking. */
6575 if (constopnum == 0)
6576 folded_compare = fold (build (code, type, constop,
6577 TREE_OPERAND (varop, 0)));
6578 else
6579 folded_compare = fold (build (code, type,
6580 TREE_OPERAND (varop, 0),
6581 constop));
6582 if (integer_zerop (folded_compare)
6583 || integer_onep (folded_compare))
6584 return omit_one_operand (type, folded_compare, varop);
6585
6586 unsigned_type = (*lang_hooks.types.type_for_size) (size, 1);
6587 precision = TYPE_PRECISION (unsigned_type);
6588 mask = build_int_2 (~0, ~0);
6589 TREE_TYPE (mask) = unsigned_type;
6590 force_fit_type (mask, 0);
6591 mask = const_binop (RSHIFT_EXPR, mask,
6592 size_int (precision - size), 0);
6593 newconst = fold (build (BIT_AND_EXPR,
6594 TREE_TYPE (varop), newconst,
6595 convert (TREE_TYPE (varop),
6596 mask)));
6597 }
6598
6599 t = build (code, type,
6600 (constopnum == 0) ? newconst : varop,
6601 (constopnum == 1) ? newconst : varop);
6602 return t;
6603 }
6604 }
6605 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
6606 {
6607 if (POINTER_TYPE_P (TREE_TYPE (varop))
6608 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
6609 && (code == EQ_EXPR || code == NE_EXPR)))
6610 {
6611 tree newconst
6612 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
6613 constop, TREE_OPERAND (varop, 1)));
6614
6615 /* Do not overwrite the current varop to be a predecrement,
6616 create a new node so that we won't confuse our caller who
6617 might create trees and throw them away, reusing the
6618 arguments that they passed to build. This shows up in
6619 the THEN or ELSE parts of ?: being postdecrements. */
6620 varop = build (PREDECREMENT_EXPR, TREE_TYPE (varop),
6621 TREE_OPERAND (varop, 0),
6622 TREE_OPERAND (varop, 1));
6623
6624 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
6625 && DECL_BIT_FIELD (TREE_OPERAND
6626 (TREE_OPERAND (varop, 0), 1)))
6627 {
6628 int size
6629 = TREE_INT_CST_LOW (DECL_SIZE
6630 (TREE_OPERAND
6631 (TREE_OPERAND (varop, 0), 1)));
6632 tree mask, unsigned_type;
6633 unsigned int precision;
6634 tree folded_compare;
6635
6636 if (constopnum == 0)
6637 folded_compare = fold (build (code, type, constop,
6638 TREE_OPERAND (varop, 0)));
6639 else
6640 folded_compare = fold (build (code, type,
6641 TREE_OPERAND (varop, 0),
6642 constop));
6643 if (integer_zerop (folded_compare)
6644 || integer_onep (folded_compare))
6645 return omit_one_operand (type, folded_compare, varop);
6646
6647 unsigned_type = (*lang_hooks.types.type_for_size) (size, 1);
6648 precision = TYPE_PRECISION (unsigned_type);
6649 mask = build_int_2 (~0, ~0);
6650 TREE_TYPE (mask) = unsigned_type;
6651 force_fit_type (mask, 0);
6652 mask = const_binop (RSHIFT_EXPR, mask,
6653 size_int (precision - size), 0);
6654 newconst = fold (build (BIT_AND_EXPR,
6655 TREE_TYPE (varop), newconst,
6656 convert (TREE_TYPE (varop),
6657 mask)));
6658 }
6659
6660 t = build (code, type,
6661 (constopnum == 0) ? newconst : varop,
6662 (constopnum == 1) ? newconst : varop);
6663 return t;
6664 }
6665 }
6666 }
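/* Illustrative example (not in the original sources): for an
   integral X, X++ == 5 is rewritten as ++X == 6, folding the
   increment into the comparison constant; the bitfield masking
   above keeps the adjusted constant within the field's width.  */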
6667
6668 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
6669 This transformation affects the cases which are handled in later
6670 optimizations involving comparisons with non-negative constants. */
6671 if (TREE_CODE (arg1) == INTEGER_CST
6672 && TREE_CODE (arg0) != INTEGER_CST
6673 && tree_int_cst_sgn (arg1) > 0)
6674 {
6675 switch (code)
6676 {
6677 case GE_EXPR:
6678 code = GT_EXPR;
6679 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6680 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6681 break;
6682
6683 case LT_EXPR:
6684 code = LE_EXPR;
6685 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6686 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6687 break;
6688
6689 default:
6690 break;
6691 }
6692 }
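/* Illustrative examples (not in the original sources): X >= 5
   becomes X > 4 and X < 5 becomes X <= 4, giving the comparisons
   against extreme values below a canonical form to match.  */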
6693
6694 /* Comparisons with the highest or lowest possible integer of
6695 the specified size will have known values. */
6696 {
6697 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
6698
6699 if (TREE_CODE (arg1) == INTEGER_CST
6700 && ! TREE_CONSTANT_OVERFLOW (arg1)
6701 && width <= HOST_BITS_PER_WIDE_INT
6702 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
6703 || POINTER_TYPE_P (TREE_TYPE (arg1))))
6704 {
6705 unsigned HOST_WIDE_INT signed_max;
6706 unsigned HOST_WIDE_INT max, min;
6707
6708 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
6709
6710 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
6711 {
6712 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
6713 min = 0;
6714 }
6715 else
6716 {
6717 max = signed_max;
6718 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
6719 }
6720
6721 if (TREE_INT_CST_HIGH (arg1) == 0
6722 && TREE_INT_CST_LOW (arg1) == max)
6723 switch (code)
6724 {
6725 case GT_EXPR:
6726 return omit_one_operand (type,
6727 convert (type, integer_zero_node),
6728 arg0);
6729 case GE_EXPR:
6730 code = EQ_EXPR;
6731 TREE_SET_CODE (t, EQ_EXPR);
6732 break;
6733 case LE_EXPR:
6734 return omit_one_operand (type,
6735 convert (type, integer_one_node),
6736 arg0);
6737 case LT_EXPR:
6738 code = NE_EXPR;
6739 TREE_SET_CODE (t, NE_EXPR);
6740 break;
6741
6742 /* The GE_EXPR and LT_EXPR cases above are not normally
6743 reached because of previous transformations. */
6744
6745 default:
6746 break;
6747 }
6748 else if (TREE_INT_CST_HIGH (arg1) == 0
6749 && TREE_INT_CST_LOW (arg1) == max - 1)
6750 switch (code)
6751 {
6752 case GT_EXPR:
6753 code = EQ_EXPR;
6754 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
6755 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6756 break;
6757 case LE_EXPR:
6758 code = NE_EXPR;
6759 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
6760 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6761 break;
6762 default:
6763 break;
6764 }
6765 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
6766 && TREE_INT_CST_LOW (arg1) == min)
6767 switch (code)
6768 {
6769 case LT_EXPR:
6770 return omit_one_operand (type,
6771 convert (type, integer_zero_node),
6772 arg0);
6773 case LE_EXPR:
6774 code = EQ_EXPR;
6775 TREE_SET_CODE (t, EQ_EXPR);
6776 break;
6777
6778 case GE_EXPR:
6779 return omit_one_operand (type,
6780 convert (type, integer_one_node),
6781 arg0);
6782 case GT_EXPR:
6783 code = NE_EXPR;
6784 TREE_SET_CODE (t, NE_EXPR);
6785 break;
6786
6787 default:
6788 break;
6789 }
6790 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
6791 && TREE_INT_CST_LOW (arg1) == min + 1)
6792 switch (code)
6793 {
6794 case GE_EXPR:
6795 code = NE_EXPR;
6796 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6797 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6798 break;
6799 case LT_EXPR:
6800 code = EQ_EXPR;
6801 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6802 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6803 break;
6804 default:
6805 break;
6806 }
6807
6808 else if (TREE_INT_CST_HIGH (arg1) == 0
6809 && TREE_INT_CST_LOW (arg1) == signed_max
6810 && TREE_UNSIGNED (TREE_TYPE (arg1))
6811 /* signed_type does not work on pointer types. */
6812 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
6813 {
6814 /* The following case also applies to X < signed_max+1
6815 and X >= signed_max+1 because of previous transformations. */
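/* Concretely (assuming 32-bit int, for illustration only): for
unsigned x, `x <= 0x7fffffffU' holds exactly when the sign bit is
clear, so it becomes `(int) x >= 0', and `x > 0x7fffffffU' becomes
`(int) x < 0'.  */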
6816 if (code == LE_EXPR || code == GT_EXPR)
6817 {
6818 tree st0, st1;
6819 st0 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg0));
6820 st1 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg1));
6821 return fold
6822 (build (code == LE_EXPR ? GE_EXPR: LT_EXPR,
6823 type, convert (st0, arg0),
6824 convert (st1, integer_zero_node)));
6825 }
6826 }
6827 }
6828 }
6829
6830 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
6831 a MINUS_EXPR of a constant, we can convert it into a comparison with
6832 a revised constant as long as no overflow occurs. */
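/* For example, `x + 9 == 12' becomes `x == 3': const_binop computes
the adjusted constant 12 - 9 without overflow.  Had the adjustment
overflowed, TREE_CONSTANT_OVERFLOW would be set on TEM and the
rewrite abandoned.  */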
6833 if ((code == EQ_EXPR || code == NE_EXPR)
6834 && TREE_CODE (arg1) == INTEGER_CST
6835 && (TREE_CODE (arg0) == PLUS_EXPR
6836 || TREE_CODE (arg0) == MINUS_EXPR)
6837 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6838 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
6839 ? MINUS_EXPR : PLUS_EXPR,
6840 arg1, TREE_OPERAND (arg0, 1), 0))
6841 && ! TREE_CONSTANT_OVERFLOW (tem))
6842 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6843
6844 /* Similarly for a NEGATE_EXPR. */
6845 else if ((code == EQ_EXPR || code == NE_EXPR)
6846 && TREE_CODE (arg0) == NEGATE_EXPR
6847 && TREE_CODE (arg1) == INTEGER_CST
6848 && 0 != (tem = negate_expr (arg1))
6849 && TREE_CODE (tem) == INTEGER_CST
6850 && ! TREE_CONSTANT_OVERFLOW (tem))
6851 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6852
6853 /* If we have X - Y == 0, we can convert that to X == Y and similarly
6854 for !=. Don't do this for ordered comparisons due to overflow. */
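/* Equality is safe because X - Y == 0 exactly when the bit patterns
of X and Y match.  An ordered comparison is not: with wrapping
32-bit arithmetic (an illustrative assumption), x = INT_MIN and
y = 1 give x - y = INT_MAX, so `x - y > 0' would be true even
though `x > y' is false.  */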
6855 else if ((code == NE_EXPR || code == EQ_EXPR)
6856 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
6857 return fold (build (code, type,
6858 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
6859
6860 /* If we are widening one operand of an integer comparison,
6861 see if the other operand is similarly being widened. Perhaps we
6862 can do the comparison in the narrower type. */
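/* For example, if S is a short widened by a NOP_EXPR, `(int) s == 100'
can be done as `s == 100', because the constant 100 fits in short
(checked by int_fits_type_p); a constant outside short's range
leaves this fold inapplicable.  */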
6863 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
6864 && TREE_CODE (arg0) == NOP_EXPR
6865 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
6866 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
6867 && (TREE_TYPE (t1) == TREE_TYPE (tem)
6868 || (TREE_CODE (t1) == INTEGER_CST
6869 && int_fits_type_p (t1, TREE_TYPE (tem)))))
6870 return fold (build (code, type, tem, convert (TREE_TYPE (tem), t1)));
6871
6872 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
6873 constant, we can simplify it. */
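/* For instance, `MIN_EXPR (x, 5) < 3' is equivalent to `x < 3':
when x < 3 the minimum is at most x, and when x >= 3 both x and 5
are at least 3.  Rewrites of this kind are what
optimize_minmax_comparison performs.  */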
6874 else if (TREE_CODE (arg1) == INTEGER_CST
6875 && (TREE_CODE (arg0) == MIN_EXPR
6876 || TREE_CODE (arg0) == MAX_EXPR)
6877 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
6878 return optimize_minmax_comparison (t);
6879
6880 /* If we are comparing an ABS_EXPR with a constant, we can
6881 convert all the cases into explicit comparisons, but they may
6882 well not be faster than doing the ABS and one comparison.
6883 But ABS (X) <= C is a range comparison, which becomes a subtraction
6884 and a comparison, and is probably faster. */
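/* That is, `ABS (x) <= 5' becomes `x >= -5 && x <= 5', built below
as a TRUTH_ANDIF_EXPR of a GE_EXPR and an LE_EXPR; this is valid
because negating the constant 5 does not overflow (checked via
TREE_CONSTANT_OVERFLOW).  */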
6885 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6886 && TREE_CODE (arg0) == ABS_EXPR
6887 && ! TREE_SIDE_EFFECTS (arg0)
6888 && (0 != (tem = negate_expr (arg1)))
6889 && TREE_CODE (tem) == INTEGER_CST
6890 && ! TREE_CONSTANT_OVERFLOW (tem))
6891 return fold (build (TRUTH_ANDIF_EXPR, type,
6892 build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
6893 build (LE_EXPR, type,
6894 TREE_OPERAND (arg0, 0), arg1)));
6895
6896 /* If this is an EQ or NE comparison with zero and ARG0 is
6897 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
6898 two operations, but the latter can be done in one less insn
6899 on machines that have only two-operand insns or on which a
6900 constant cannot be the first operand. */
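/* Both forms test bit FOO of BAR: ((1 << foo) & bar) != 0 exactly
when ((bar >> foo) & 1) != 0, since shifting BAR right by FOO
moves the tested bit into position 0 before masking.  */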
6901 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
6902 && TREE_CODE (arg0) == BIT_AND_EXPR)
6903 {
6904 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
6905 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
6906 return
6907 fold (build (code, type,
6908 build (BIT_AND_EXPR, TREE_TYPE (arg0),
6909 build (RSHIFT_EXPR,
6910 TREE_TYPE (TREE_OPERAND (arg0, 0)),
6911 TREE_OPERAND (arg0, 1),
6912 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
6913 convert (TREE_TYPE (arg0),
6914 integer_one_node)),
6915 arg1));
6916 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
6917 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
6918 return
6919 fold (build (code, type,
6920 build (BIT_AND_EXPR, TREE_TYPE (arg0),
6921 build (RSHIFT_EXPR,
6922 TREE_TYPE (TREE_OPERAND (arg0, 1)),
6923 TREE_OPERAND (arg0, 0),
6924 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
6925 convert (TREE_TYPE (arg0),
6926 integer_one_node)),
6927 arg1));
6928 }
6929
6930 /* If this is an NE or EQ comparison of zero against the result of a
6931 signed MOD operation whose second operand is a power of 2, make
6932 the MOD operation unsigned since it is simpler and equivalent. */
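/* The two are equivalent because a power of 2 divides 2**width.
For example (assuming 32-bit int), `x % 4 == 0' holds exactly when
`(unsigned) x % 4 == 0': reducing x modulo 2**32 cannot change its
divisibility by 4.  */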
6933 if ((code == NE_EXPR || code == EQ_EXPR)
6934 && integer_zerop (arg1)
6935 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
6936 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
6937 || TREE_CODE (arg0) == CEIL_MOD_EXPR
6938 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
6939 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
6940 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6941 {
6942 tree newtype = (*lang_hooks.types.unsigned_type) (TREE_TYPE (arg0));
6943 tree newmod = build (TREE_CODE (arg0), newtype,
6944 convert (newtype, TREE_OPERAND (arg0, 0)),
6945 convert (newtype, TREE_OPERAND (arg0, 1)));
6946
6947 return build (code, type, newmod, convert (newtype, arg1));
6948 }
6949
6950 /* If this is an NE comparison of zero with an AND of one, remove the
6951 comparison since the AND will give the correct value. */
6952 if (code == NE_EXPR && integer_zerop (arg1)
6953 && TREE_CODE (arg0) == BIT_AND_EXPR
6954 && integer_onep (TREE_OPERAND (arg0, 1)))
6955 return convert (type, arg0);
6956
6957 /* If we have (A & C) == C where C is a power of 2, convert this into
6958 (A & C) != 0. Similarly for NE_EXPR. */
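/* When C is a power of 2, A & C is either 0 or C, so for example
`(x & 8) == 8' is the same test as `(x & 8) != 0'.  */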
6959 if ((code == EQ_EXPR || code == NE_EXPR)
6960 && TREE_CODE (arg0) == BIT_AND_EXPR
6961 && integer_pow2p (TREE_OPERAND (arg0, 1))
6962 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
6963 return fold (build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
6964 arg0, integer_zero_node));
6965
6966 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6967 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
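/* For example (assuming 32-bit int, for illustration),
`(x & 0x80000000) != 0' is exactly `x < 0' once X is viewed with a
signed type, which is why sign_bit_p and a signed_type conversion
are used below.  */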
6968 if ((code == EQ_EXPR || code == NE_EXPR)
6969 && TREE_CODE (arg0) == BIT_AND_EXPR
6970 && integer_zerop (arg1))
6971 {
6972 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0),
6973 TREE_OPERAND (arg0, 1));
6974 if (arg00 != NULL_TREE)
6975 {
6976 tree stype = (*lang_hooks.types.signed_type) (TREE_TYPE (arg00));
6977 return fold (build (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
6978 convert (stype, arg00),
6979 convert (stype, integer_zero_node)));
6980 }
6981 }
6982
6983 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
6984 and similarly for >= into !=. */
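/* For unsigned x, `x < (1 << y)' says every bit of x at position y
or above is zero, i.e. `x >> y == 0'; likewise `x >= (1 << y)'
becomes `x >> y != 0'.  */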
6985 if ((code == LT_EXPR || code == GE_EXPR)
6986 && TREE_UNSIGNED (TREE_TYPE (arg0))
6987 && TREE_CODE (arg1) == LSHIFT_EXPR
6988 && integer_onep (TREE_OPERAND (arg1, 0)))
6989 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
6990 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
6991 TREE_OPERAND (arg1, 1)),
6992 convert (TREE_TYPE (arg0), integer_zero_node));
6993
6994 else if ((code == LT_EXPR || code == GE_EXPR)
6995 && TREE_UNSIGNED (TREE_TYPE (arg0))
6996 && (TREE_CODE (arg1) == NOP_EXPR
6997 || TREE_CODE (arg1) == CONVERT_EXPR)
6998 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
6999 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
7000 return
7001 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7002 convert (TREE_TYPE (arg0),
7003 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7004 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
7005 convert (TREE_TYPE (arg0), integer_zero_node));
7006
7007 /* Simplify comparison of something with itself. (For IEEE
7008 floating-point, we can only do some of these simplifications.) */
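/* IEEE semantics drive the cases below: if ARG0 may be a NaN,
`x == x' is false, so EQ/GE/LE merely collapse to a runtime
EQ_EXPR instead of the constant 1, and `x != x' cannot be folded
at all; but `x < x' and `x > x' are false even on NaNs, so they
fold to 0 unconditionally.  */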
7009 if (operand_equal_p (arg0, arg1, 0))
7010 {
7011 switch (code)
7012 {
7013 case EQ_EXPR:
7014 case GE_EXPR:
7015 case LE_EXPR:
7016 if (! FLOAT_TYPE_P (TREE_TYPE (arg0)))
7017 return constant_boolean_node (1, type);
7018 code = EQ_EXPR;
7019 TREE_SET_CODE (t, code);
7020 break;
7021
7022 case NE_EXPR:
7023 /* For NE, we can only do this simplification if integer. */
7024 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
7025 break;
7026 /* ... fall through ... */
7027 case GT_EXPR:
7028 case LT_EXPR:
7029 return constant_boolean_node (0, type);
7030 default:
7031 abort ();
7032 }
7033 }
7034
7035 /* If we are comparing an expression that just has comparisons
7036 of two integer values, arithmetic expressions of those comparisons,
7037 and constants, we can simplify it. There are only three cases
7038 to check: the two values can either be equal, the first can be
7039 greater, or the second can be greater. Fold the expression for
7040 those three values. Since each value must be 0 or 1, we have
7041 eight possibilities, each of which corresponds to the constant 0
7042 or 1 or one of the six possible comparisons.
7043
7044 This handles common cases like (a > b) == 0 but also handles
7045 expressions like ((x > y) - (y > x)) > 0, which supposedly
7046 occur in macroized code. */
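/* Worked example for ((x > y) - (y > x)) > 0: substituting the three
orderings gives 1 - 0 > 0 (true) when x > y, 0 - 0 > 0 (false) when
x == y, and 0 - 1 > 0 (false) when x < y.  That is mask 4
(true/false/false) in the switch below, which selects GT_EXPR, so
the whole expression folds to `x > y'.  */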
7047
7048 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
7049 {
7050 tree cval1 = 0, cval2 = 0;
7051 int save_p = 0;
7052
7053 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
7054 /* Don't handle degenerate cases here; they should already
7055 have been handled anyway. */
7056 && cval1 != 0 && cval2 != 0
7057 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
7058 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
7059 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
7060 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
7061 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
7062 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
7063 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
7064 {
7065 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
7066 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
7067
7068 /* We can't just pass T to eval_subst in case cval1 or cval2
7069 was the same as ARG1. */
7070
7071 tree high_result
7072 = fold (build (code, type,
7073 eval_subst (arg0, cval1, maxval, cval2, minval),
7074 arg1));
7075 tree equal_result
7076 = fold (build (code, type,
7077 eval_subst (arg0, cval1, maxval, cval2, maxval),
7078 arg1));
7079 tree low_result
7080 = fold (build (code, type,
7081 eval_subst (arg0, cval1, minval, cval2, maxval),
7082 arg1));
7083
7084 /* All three of these results should be 0 or 1. Confirm they
7085 are. Then use those values to select the proper code
7086 to use. */
7087
7088 if ((integer_zerop (high_result)
7089 || integer_onep (high_result))
7090 && (integer_zerop (equal_result)
7091 || integer_onep (equal_result))
7092 && (integer_zerop (low_result)
7093 || integer_onep (low_result)))
7094 {
7095 /* Make a 3-bit mask with the high-order bit being the
7096 value for `>', the next for '=', and the low for '<'. */
7097 switch ((integer_onep (high_result) * 4)
7098 + (integer_onep (equal_result) * 2)
7099 + integer_onep (low_result))
7100 {
7101 case 0:
7102 /* Always false. */
7103 return omit_one_operand (type, integer_zero_node, arg0);
7104 case 1:
7105 code = LT_EXPR;
7106 break;
7107 case 2:
7108 code = EQ_EXPR;
7109 break;
7110 case 3:
7111 code = LE_EXPR;
7112 break;
7113 case 4:
7114 code = GT_EXPR;
7115 break;
7116 case 5:
7117 code = NE_EXPR;
7118 break;
7119 case 6:
7120 code = GE_EXPR;
7121 break;
7122 case 7:
7123 /* Always true. */
7124 return omit_one_operand (type, integer_one_node, arg0);
7125 }
7126
7127 t = build (code, type, cval1, cval2);
7128 if (save_p)
7129 return save_expr (t);
7130 else
7131 return fold (t);
7132 }
7133 }
7134 }
7135
7136 /* If this is a comparison of a field, we may be able to simplify it. */
7137 if (((TREE_CODE (arg0) == COMPONENT_REF
7138 && (*lang_hooks.can_use_bit_fields_p) ())
7139 || TREE_CODE (arg0) == BIT_FIELD_REF)
7140 && (code == EQ_EXPR || code == NE_EXPR)
7141 /* Handle the constant case even without -O
7142 to make sure the warnings are given. */
7143 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
7144 {
7145 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
7146 return t1 ? t1 : t;
7147 }
7148
7149 /* If this is a comparison of complex values and either or both sides
7150 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
7151 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
7152 This may prevent needless evaluations. */
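/* That is, for complex A and B, `A == B' becomes
`realpart (A) == realpart (B) && imagpart (A) == imagpart (B)'
via TRUTH_ANDIF_EXPR, and `A != B' uses TRUTH_ORIF_EXPR, so the
imaginary halves need not be evaluated when the real halves
already decide the result.  */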
7153 if ((code == EQ_EXPR || code == NE_EXPR)
7154 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
7155 && (TREE_CODE (arg0) == COMPLEX_EXPR
7156 || TREE_CODE (arg1) == COMPLEX_EXPR
7157 || TREE_CODE (arg0) == COMPLEX_CST
7158 || TREE_CODE (arg1) == COMPLEX_CST))
7159 {
7160 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
7161 tree real0, imag0, real1, imag1;
7162
7163 arg0 = save_expr (arg0);
7164 arg1 = save_expr (arg1);
7165 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
7166 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
7167 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
7168 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
7169
7170 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
7171 : TRUTH_ORIF_EXPR),
7172 type,
7173 fold (build (code, type, real0, real1)),
7174 fold (build (code, type, imag0, imag1))));
7175 }
7176
7177 /* Optimize comparisons of strlen vs zero to a compare of the
7178 first character of the string vs zero. To wit,
7179 strlen(ptr) == 0 => *ptr == 0
7180 strlen(ptr) != 0 => *ptr != 0
7181 Other cases should reduce to one of these two (or a constant)
7182 due to the return value of strlen being unsigned. */
7183 if ((code == EQ_EXPR || code == NE_EXPR)
7184 && integer_zerop (arg1)
7185 && TREE_CODE (arg0) == CALL_EXPR
7186 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR)
7187 {
7188 tree fndecl = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7189 tree arglist;
7190
7191 if (TREE_CODE (fndecl) == FUNCTION_DECL
7192 && DECL_BUILT_IN (fndecl)
7193 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
7194 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
7195 && (arglist = TREE_OPERAND (arg0, 1))
7196 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
7197 && ! TREE_CHAIN (arglist))
7198 return fold (build (code, type,
7199 build1 (INDIRECT_REF, char_type_node,
7200 TREE_VALUE (arglist)),
7201 integer_zero_node));
7202 }
7203
7204 /* From here on, the only cases we handle are when the result is
7205 known to be a constant.
7206
7207 To compute GT, swap the arguments and do LT.
7208 To compute GE, do LT and invert the result.
7209 To compute LE, swap the arguments, do LT and invert the result.
7210 To compute NE, do EQ and invert the result.
7211
7212 Therefore, the code below must handle only EQ and LT. */
7213
7214 if (code == LE_EXPR || code == GT_EXPR)
7215 {
7216 tem = arg0, arg0 = arg1, arg1 = tem;
7217 code = swap_tree_comparison (code);
7218 }
7219
7220 /* Note that it is safe to invert for real values here because we
7221 will check below in the one case where it matters. */
7222
7223 t1 = NULL_TREE;
7224 invert = 0;
7225 if (code == NE_EXPR || code == GE_EXPR)
7226 {
7227 invert = 1;
7228 code = invert_tree_comparison (code);
7229 }
7230
7231 /* Compute a result for LT or EQ if args permit;
7232 otherwise return T. */
7233 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
7234 {
7235 if (code == EQ_EXPR)
7236 t1 = build_int_2 (tree_int_cst_equal (arg0, arg1), 0);
7237 else
7238 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
7239 ? INT_CST_LT_UNSIGNED (arg0, arg1)
7240 : INT_CST_LT (arg0, arg1)),
7241 0);
7242 }
7243
7244 #if 0 /* This is no longer useful, but breaks some real code. */
7245 /* Assume a nonexplicit constant cannot equal an explicit one,
7246 since such code would be undefined anyway.
7247 Exception: on sysvr4, using #pragma weak,
7248 a label can come out as 0. */
7249 else if (TREE_CODE (arg1) == INTEGER_CST
7250 && !integer_zerop (arg1)
7251 && TREE_CONSTANT (arg0)
7252 && TREE_CODE (arg0) == ADDR_EXPR
7253 && code == EQ_EXPR)
7254 t1 = build_int_2 (0, 0);
7255 #endif
7256 /* Two real constants can be compared explicitly. */
7257 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
7258 {
7259 /* If either operand is a NaN, the result is false with two
7260 exceptions: First, an NE_EXPR is true on NaNs, but that case
7261 is already handled correctly since we will be inverting the
7262 result for NE_EXPR. Second, if we had inverted a LE_EXPR
7263 or a GE_EXPR into a LT_EXPR, we must return true so that it
7264 will be inverted into false. */
7265
7266 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
7267 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
7268 t1 = build_int_2 (invert && code == LT_EXPR, 0);
7269
7270 else if (code == EQ_EXPR)
7271 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
7272 TREE_REAL_CST (arg1)),
7273 0);
7274 else
7275 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
7276 TREE_REAL_CST (arg1)),
7277 0);
7278 }
7279
7280 if (t1 == NULL_TREE)
7281 return t;
7282
7283 if (invert)
7284 TREE_INT_CST_LOW (t1) ^= 1;
7285
7286 TREE_TYPE (t1) = type;
7287 if (TREE_CODE (type) == BOOLEAN_TYPE)
7288 return (*lang_hooks.truthvalue_conversion) (t1);
7289 return t1;
7290
7291 case COND_EXPR:
7292 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
7293 so all simple results must be passed through pedantic_non_lvalue. */
7294 if (TREE_CODE (arg0) == INTEGER_CST)
7295 return pedantic_non_lvalue
7296 (TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
7297 else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
7298 return pedantic_omit_one_operand (type, arg1, arg0);
7299
7300 /* If the second operand is zero, invert the comparison and swap
7301 the second and third operands. Likewise if the second operand
7302 is constant and the third is not or if the third operand is
7303 equivalent to the first operand of the comparison. */
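/* For example, `a < b ? 0 : c' with integer A and B is rewritten as
`a >= b ? c : 0'; for IEEE operands invert_truthvalue may refuse
(a TRUTH_NOT_EXPR comes back), and the expression is then left
alone.  */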
7304
7305 if (integer_zerop (arg1)
7306 || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
7307 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
7308 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
7309 TREE_OPERAND (t, 2),
7310 TREE_OPERAND (arg0, 1))))
7311 {
7312 /* See if this can be inverted. If it can't, possibly because
7313 it was a floating-point inequality comparison, don't do
7314 anything. */
7315 tem = invert_truthvalue (arg0);
7316
7317 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
7318 {
7319 t = build (code, type, tem,
7320 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
7321 arg0 = tem;
7322 /* arg1 should be the first argument of the new T. */
7323 arg1 = TREE_OPERAND (t, 1);
7324 STRIP_NOPS (arg1);
7325 }
7326 }
7327
7328 /* If we have A op B ? A : C, we may be able to convert this to a
7329 simpler expression, depending on the operation and the values
7330 of B and C. Signed zeros prevent all of these transformations,
7331 for reasons given above each one. */
7332
7333 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
7334 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
7335 arg1, TREE_OPERAND (arg0, 1))
7336 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
7337 {
7338 tree arg2 = TREE_OPERAND (t, 2);
7339 enum tree_code comp_code = TREE_CODE (arg0);
7340
7341 STRIP_NOPS (arg2);
7342
7343 /* If we have A op 0 ? A : -A, consider applying the following
7344 transformations:
7345
7346 A == 0? A : -A same as -A
7347 A != 0? A : -A same as A
7348 A >= 0? A : -A same as abs (A)
7349 A > 0? A : -A same as abs (A)
7350 A <= 0? A : -A same as -abs (A)
7351 A < 0? A : -A same as -abs (A)
7352
7353 None of these transformations work for modes with signed
7354 zeros. If A is +/-0, the first two transformations will
7355 change the sign of the result (from +0 to -0, or vice
7356 versa). The last four will fix the sign of the result,
7357 even though the original expressions could be positive or
7358 negative, depending on the sign of A.
7359
7360 Note that all these transformations are correct if A is
7361 NaN, since the two alternatives (A and -A) are also NaNs. */
7362 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
7363 ? real_zerop (TREE_OPERAND (arg0, 1))
7364 : integer_zerop (TREE_OPERAND (arg0, 1)))
7365 && TREE_CODE (arg2) == NEGATE_EXPR
7366 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
7367 switch (comp_code)
7368 {
7369 case EQ_EXPR:
7370 return
7371 pedantic_non_lvalue
7372 (convert (type,
7373 negate_expr
7374 (convert (TREE_TYPE (TREE_OPERAND (t, 1)),
7375 arg1))));
7376 case NE_EXPR:
7377 return pedantic_non_lvalue (convert (type, arg1));
7378 case GE_EXPR:
7379 case GT_EXPR:
7380 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7381 arg1 = convert ((*lang_hooks.types.signed_type)
7382 (TREE_TYPE (arg1)), arg1);
7383 return pedantic_non_lvalue
7384 (convert (type, fold (build1 (ABS_EXPR,
7385 TREE_TYPE (arg1), arg1))));
7386 case LE_EXPR:
7387 case LT_EXPR:
7388 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7389 arg1 = convert ((*lang_hooks.types.signed_type)
7390 (TREE_TYPE (arg1)), arg1);
7391 return pedantic_non_lvalue
7392 (negate_expr (convert (type,
7393 fold (build1 (ABS_EXPR,
7394 TREE_TYPE (arg1),
7395 arg1)))));
7396 default:
7397 abort ();
7398 }
7399
7400 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
7401 A == 0 ? A : 0 is always 0 unless A is -0. Note that
7402 both transformations are correct when A is NaN: A != 0
7403 is then true, and A == 0 is false. */
7404
7405 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
7406 {
7407 if (comp_code == NE_EXPR)
7408 return pedantic_non_lvalue (convert (type, arg1));
7409 else if (comp_code == EQ_EXPR)
7410 return pedantic_non_lvalue (convert (type, integer_zero_node));
7411 }
7412
7413 /* Try some transformations of A op B ? A : B.
7414
7415 A == B? A : B same as B
7416 A != B? A : B same as A
7417 A >= B? A : B same as max (A, B)
7418 A > B? A : B same as max (B, A)
7419 A <= B? A : B same as min (A, B)
7420 A < B? A : B same as min (B, A)
7421
7422 As above, these transformations don't work in the presence
7423 of signed zeros. For example, if A and B are zeros of
7424 opposite sign, the first two transformations will change
7425 the sign of the result. In the last four, the original
7426 expressions give different results for (A=+0, B=-0) and
7427 (A=-0, B=+0), but the transformed expressions do not.
7428
7429 The first two transformations are correct if either A or B
7430 is a NaN. In the first transformation, the condition will
7431 be false, and B will indeed be chosen. In the case of the
7432 second transformation, the condition A != B will be true,
7433 and A will be chosen.
7434
7435 The conversions to max() and min() are not correct if B is
7436 a number and A is not. The conditions in the original
7437 expressions will be false, so all four give B. The min()
7438 and max() versions would give a NaN instead. */
7439 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
7440 arg2, TREE_OPERAND (arg0, 0)))
7441 {
7442 tree comp_op0 = TREE_OPERAND (arg0, 0);
7443 tree comp_op1 = TREE_OPERAND (arg0, 1);
7444 tree comp_type = TREE_TYPE (comp_op0);
7445
7446 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
7447 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
7448 {
7449 comp_type = type;
7450 comp_op0 = arg1;
7451 comp_op1 = arg2;
7452 }
7453
7454 switch (comp_code)
7455 {
7456 case EQ_EXPR:
7457 return pedantic_non_lvalue (convert (type, arg2));
7458 case NE_EXPR:
7459 return pedantic_non_lvalue (convert (type, arg1));
7460 case LE_EXPR:
7461 case LT_EXPR:
7462 /* In C++ a ?: expression can be an lvalue, so put first
7463 the operand that will be used if the two compare equal,
7464 so that we can convert this back to the
7465 corresponding COND_EXPR. */
7466 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
7467 return pedantic_non_lvalue
7468 (convert (type, fold (build (MIN_EXPR, comp_type,
7469 (comp_code == LE_EXPR
7470 ? comp_op0 : comp_op1),
7471 (comp_code == LE_EXPR
7472 ? comp_op1 : comp_op0)))));
7473 break;
7474 case GE_EXPR:
7475 case GT_EXPR:
7476 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
7477 return pedantic_non_lvalue
7478 (convert (type, fold (build (MAX_EXPR, comp_type,
7479 (comp_code == GE_EXPR
7480 ? comp_op0 : comp_op1),
7481 (comp_code == GE_EXPR
7482 ? comp_op1 : comp_op0)))));
7483 break;
7484 default:
7485 abort ();
7486 }
7487 }
7488
7489 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
7490 we might still be able to simplify this. For example,
7491 if C1 is one less or one more than C2, this might have started
7492 out as a MIN or MAX and been transformed by this function.
7493 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
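/* For instance, `x < 6 ? x : 5' is min (x, 5): when x < 6, i.e.
x <= 5, the result is x = min (x, 5); when x >= 6 it is
5 = min (x, 5).  This is the LT_EXPR case below, where
C1 == C2 + 1.  */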
7494
7495 if (INTEGRAL_TYPE_P (type)
7496 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7497 && TREE_CODE (arg2) == INTEGER_CST)
7498 switch (comp_code)
7499 {
7500 case EQ_EXPR:
7501 /* We can replace A with C1 in this case. */
7502 arg1 = convert (type, TREE_OPERAND (arg0, 1));
7503 t = build (code, type, TREE_OPERAND (t, 0), arg1,
7504 TREE_OPERAND (t, 2));
7505 break;
7506
7507 case LT_EXPR:
7508 /* If C1 is C2 + 1, this is min(A, C2). */
7509 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
7510 && operand_equal_p (TREE_OPERAND (arg0, 1),
7511 const_binop (PLUS_EXPR, arg2,
7512 integer_one_node, 0), 1))
7513 return pedantic_non_lvalue
7514 (fold (build (MIN_EXPR, type, arg1, arg2)));
7515 break;
7516
7517 case LE_EXPR:
7518 /* If C1 is C2 - 1, this is min(A, C2). */
7519 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
7520 && operand_equal_p (TREE_OPERAND (arg0, 1),
7521 const_binop (MINUS_EXPR, arg2,
7522 integer_one_node, 0), 1))
7523 return pedantic_non_lvalue
7524 (fold (build (MIN_EXPR, type, arg1, arg2)));
7525 break;
7526
7527 case GT_EXPR:
7528 /* If C1 is C2 - 1, this is max(A, C2). */
7529 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
7530 && operand_equal_p (TREE_OPERAND (arg0, 1),
7531 const_binop (MINUS_EXPR, arg2,
7532 integer_one_node, 0), 1))
7533 return pedantic_non_lvalue
7534 (fold (build (MAX_EXPR, type, arg1, arg2)));
7535 break;
7536
7537 case GE_EXPR:
7538 /* If C1 is C2 + 1, this is max(A, C2). */
7539 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
7540 && operand_equal_p (TREE_OPERAND (arg0, 1),
7541 const_binop (PLUS_EXPR, arg2,
7542 integer_one_node, 0), 1))
7543 return pedantic_non_lvalue
7544 (fold (build (MAX_EXPR, type, arg1, arg2)));
7545 break;
7546 case NE_EXPR:
7547 break;
7548 default:
7549 abort ();
7550 }
7551 }
7552
7553 /* If the second operand is simpler than the third, swap them
7554 since that produces better jump optimization results. */
7555 if ((TREE_CONSTANT (arg1) || DECL_P (arg1)
7556 || TREE_CODE (arg1) == SAVE_EXPR)
7557 && ! (TREE_CONSTANT (TREE_OPERAND (t, 2))
7558 || DECL_P (TREE_OPERAND (t, 2))
7559 || TREE_CODE (TREE_OPERAND (t, 2)) == SAVE_EXPR))
7560 {
7561 /* See if this can be inverted. If it can't, possibly because
7562 it was a floating-point inequality comparison, don't do
7563 anything. */
7564 tem = invert_truthvalue (arg0);
7565
7566 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
7567 {
7568 t = build (code, type, tem,
7569 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
7570 arg0 = tem;
7571 /* arg1 should be the first argument of the new T. */
7572 arg1 = TREE_OPERAND (t, 1);
7573 STRIP_NOPS (arg1);
7574 }
7575 }
7576
7577 /* Convert A ? 1 : 0 to simply A. */
7578 if (integer_onep (TREE_OPERAND (t, 1))
7579 && integer_zerop (TREE_OPERAND (t, 2))
7580 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
7581 call to fold will try to move the conversion inside
7582 a COND, which will recurse. In that case, the COND_EXPR
7583 is probably the best choice, so leave it alone. */
7584 && type == TREE_TYPE (arg0))
7585 return pedantic_non_lvalue (arg0);
7586
7587 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
7588 over COND_EXPR in cases such as floating point comparisons. */
7589 if (integer_zerop (TREE_OPERAND (t, 1))
7590 && integer_onep (TREE_OPERAND (t, 2))
7591 && truth_value_p (TREE_CODE (arg0)))
7592 return pedantic_non_lvalue (convert (type,
7593 invert_truthvalue (arg0)));
7594
7595 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
7596 operation is simply A & 2. */
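/* Since `x & 2' is already either 2 or 0, `(x & 2) != 0 ? 2 : 0'
just reproduces its operand and can be replaced by `x & 2'
itself.  */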
7597
7598 if (integer_zerop (TREE_OPERAND (t, 2))
7599 && TREE_CODE (arg0) == NE_EXPR
7600 && integer_zerop (TREE_OPERAND (arg0, 1))
7601 && integer_pow2p (arg1)
7602 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
7603 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
7604 arg1, 1))
7605 return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));
7606
7607 /* Convert A ? B : 0 into A && B if A and B are truth values. */
7608 if (integer_zerop (TREE_OPERAND (t, 2))
7609 && truth_value_p (TREE_CODE (arg0))
7610 && truth_value_p (TREE_CODE (arg1)))
7611 return pedantic_non_lvalue (fold (build (TRUTH_ANDIF_EXPR, type,
7612 arg0, arg1)));
7613
7614 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
7615 if (integer_onep (TREE_OPERAND (t, 2))
7616 && truth_value_p (TREE_CODE (arg0))
7617 && truth_value_p (TREE_CODE (arg1)))
7618 {
7619 /* Only perform transformation if ARG0 is easily inverted. */
7620 tem = invert_truthvalue (arg0);
7621 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
7622 return pedantic_non_lvalue (fold (build (TRUTH_ORIF_EXPR, type,
7623 tem, arg1)));
7624 }
7625
7626 return t;
7627
7628 case COMPOUND_EXPR:
7629 /* When pedantic, a compound expression can be neither an lvalue
7630 nor an integer constant expression. */
7631 if (TREE_SIDE_EFFECTS (arg0) || pedantic)
7632 return t;
7633 /* Don't let (0, 0) be null pointer constant. */
7634 if (integer_zerop (arg1))
7635 return build1 (NOP_EXPR, type, arg1);
7636 return convert (type, arg1);
7637
7638 case COMPLEX_EXPR:
7639 if (wins)
7640 return build_complex (type, arg0, arg1);
7641 return t;
7642
7643 case REALPART_EXPR:
7644 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7645 return t;
7646 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7647 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7648 TREE_OPERAND (arg0, 1));
7649 else if (TREE_CODE (arg0) == COMPLEX_CST)
7650 return TREE_REALPART (arg0);
7651 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7652 return fold (build (TREE_CODE (arg0), type,
7653 fold (build1 (REALPART_EXPR, type,
7654 TREE_OPERAND (arg0, 0))),
7655 fold (build1 (REALPART_EXPR,
7656 type, TREE_OPERAND (arg0, 1)))));
7657 return t;
7658
7659 case IMAGPART_EXPR:
7660 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7661 return convert (type, integer_zero_node);
7662 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7663 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7664 TREE_OPERAND (arg0, 0));
7665 else if (TREE_CODE (arg0) == COMPLEX_CST)
7666 return TREE_IMAGPART (arg0);
7667 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7668 return fold (build (TREE_CODE (arg0), type,
7669 fold (build1 (IMAGPART_EXPR, type,
7670 TREE_OPERAND (arg0, 0))),
7671 fold (build1 (IMAGPART_EXPR, type,
7672 TREE_OPERAND (arg0, 1)))));
7673 return t;
7674
7675 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
7676 appropriate. */
7677 case CLEANUP_POINT_EXPR:
7678 if (! has_cleanups (arg0))
7679 return TREE_OPERAND (t, 0);
7680
7681 {
7682 enum tree_code code0 = TREE_CODE (arg0);
7683 int kind0 = TREE_CODE_CLASS (code0);
7684 tree arg00 = TREE_OPERAND (arg0, 0);
7685 tree arg01;
7686
7687 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
7688 return fold (build1 (code0, type,
7689 fold (build1 (CLEANUP_POINT_EXPR,
7690 TREE_TYPE (arg00), arg00))));
7691
7692 if (kind0 == '<' || kind0 == '2'
7693 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
7694 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
7695 || code0 == TRUTH_XOR_EXPR)
7696 {
7697 arg01 = TREE_OPERAND (arg0, 1);
7698
7699 if (TREE_CONSTANT (arg00)
7700 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
7701 && ! has_cleanups (arg00)))
7702 return fold (build (code0, type, arg00,
7703 fold (build1 (CLEANUP_POINT_EXPR,
7704 TREE_TYPE (arg01), arg01))));
7705
7706 if (TREE_CONSTANT (arg01))
7707 return fold (build (code0, type,
7708 fold (build1 (CLEANUP_POINT_EXPR,
7709 TREE_TYPE (arg00), arg00)),
7710 arg01));
7711 }
7712
7713 return t;
7714 }
7715
7716 case CALL_EXPR:
7717 /* Check for a built-in function. */
7718 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
7719 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (expr, 0), 0))
7720 == FUNCTION_DECL)
7721 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
7722 {
7723 tree tmp = fold_builtin (expr);
7724 if (tmp)
7725 return tmp;
7726 }
7727 return t;
7728
7729 default:
7730 return t;
7731 } /* switch (code) */
7732 }
7733
7734 /* Determine if first argument is a multiple of second argument. Return 0 if
7735 it is not, or we cannot easily determine that it is.
7736
7737 An example of the sort of thing we care about (at this point; this routine
7738 could surely be made more general, and expanded to do what the *_DIV_EXPR's
7739 fold cases do now) is discovering that
7740
7741 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
7742
7743 is a multiple of
7744
7745 SAVE_EXPR (J * 8)
7746
7747 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
7748
7749 This code also handles discovering that
7750
7751 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
7752
7753 is a multiple of 8, so we don't have to worry about a
7754 possible remainder.
7755
7756 Note that we *look* inside a SAVE_EXPR only to determine how it was
7757 calculated; it is not safe for fold to do much of anything else with the
7758 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
7759 at run time. For example, the latter example above *cannot* be implemented
7760 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
7761 evaluation time of the original SAVE_EXPR is not necessarily the same at
7762 the time the new expression is evaluated. The only optimization of this
7763 sort that would be valid is changing
7764
7765 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
7766
7767 divided by 8 to
7768
7769 SAVE_EXPR (I) * SAVE_EXPR (J)
7770
7771 (where the same SAVE_EXPR (J) is used in the original and the
7772 transformed version). */
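/* As an illustrative call (not one made in this file):
multiple_of_p (type, `i * (j << 3)', 8) returns 1.  The MULT_EXPR
case tries each factor, and the LSHIFT_EXPR case proves that
`j << 3' is a multiple of 1 << 3 == 8.  */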
7773
7774 static int
7775 multiple_of_p (type, top, bottom)
7776 tree type;
7777 tree top;
7778 tree bottom;
7779 {
7780 if (operand_equal_p (top, bottom, 0))
7781 return 1;
7782
7783 if (TREE_CODE (type) != INTEGER_TYPE)
7784 return 0;
7785
7786 switch (TREE_CODE (top))
7787 {
7788 case MULT_EXPR:
7789 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
7790 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
7791
7792 case PLUS_EXPR:
7793 case MINUS_EXPR:
7794 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
7795 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
7796
7797 case LSHIFT_EXPR:
7798 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
7799 {
7800 tree op1, t1;
7801
7802 op1 = TREE_OPERAND (top, 1);
7803 /* const_binop may not detect overflow correctly,
7804 so check for it explicitly here. */
7805 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
7806 > TREE_INT_CST_LOW (op1)
7807 && TREE_INT_CST_HIGH (op1) == 0
7808 && 0 != (t1 = convert (type,
7809 const_binop (LSHIFT_EXPR, size_one_node,
7810 op1, 0)))
7811 && ! TREE_OVERFLOW (t1))
7812 return multiple_of_p (type, t1, bottom);
7813 }
7814 return 0;
7815
7816 case NOP_EXPR:
7817 /* Can't handle conversions from a non-integral type or a wider integral type. */
7818 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
7819 || (TYPE_PRECISION (type)
7820 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
7821 return 0;
7822
7823 /* .. fall through ... */
7824
7825 case SAVE_EXPR:
7826 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
7827
7828 case INTEGER_CST:
7829 if (TREE_CODE (bottom) != INTEGER_CST
7830 || (TREE_UNSIGNED (type)
7831 && (tree_int_cst_sgn (top) < 0
7832 || tree_int_cst_sgn (bottom) < 0)))
7833 return 0;
7834 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
7835 top, bottom, 0));
7836
7837 default:
7838 return 0;
7839 }
7840 }
7841
7842 /* Return true if `t' is known to be non-negative. */
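/* For example, a MAX_EXPR is nonnegative if either operand is (the
larger is at least the nonnegative one), while a MIN_EXPR needs
both; the cases below encode exactly that asymmetry.  */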
7843
7844 int
7845 tree_expr_nonnegative_p (t)
7846 tree t;
7847 {
7848 switch (TREE_CODE (t))
7849 {
7850 case ABS_EXPR:
7851 case FFS_EXPR:
7852 case POPCOUNT_EXPR:
7853 case PARITY_EXPR:
7854 return 1;
7855
7856 case CLZ_EXPR:
7857 case CTZ_EXPR:
7858 /* These are undefined at zero. This is true even if
7859 C[LT]Z_DEFINED_VALUE_AT_ZERO is set, since what we're
7860 computing here is a user-visible property. */
7861 return 0;
7862
7863 case INTEGER_CST:
7864 return tree_int_cst_sgn (t) >= 0;
7865 case TRUNC_DIV_EXPR:
7866 case CEIL_DIV_EXPR:
7867 case FLOOR_DIV_EXPR:
7868 case ROUND_DIV_EXPR:
7869 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
7870 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7871 case TRUNC_MOD_EXPR:
7872 case CEIL_MOD_EXPR:
7873 case FLOOR_MOD_EXPR:
7874 case ROUND_MOD_EXPR:
7875 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
7876 case COND_EXPR:
7877 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
7878 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
7879 case COMPOUND_EXPR:
7880 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7881 case MIN_EXPR:
7882 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
7883 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7884 case MAX_EXPR:
7885 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
7886 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7887 case MODIFY_EXPR:
7888 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7889 case BIND_EXPR:
7890 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7891 case SAVE_EXPR:
7892 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
7893 case NON_LVALUE_EXPR:
7894 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
7895 case RTL_EXPR:
7896 return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));
7897
7898 default:
7899 if (truth_value_p (TREE_CODE (t)))
7900 /* Truth values evaluate to 0 or 1, which is nonnegative. */
7901 return 1;
7902 else
7903 /* We don't know the sign of `t', so be conservative and return false. */
7904 return 0;
7905 }
7906 }
7907
7908 /* Return true if `r' is known to be non-negative.
7909 Only handles constants at the moment. */
7910
7911 int
7912 rtl_expr_nonnegative_p (r)
7913 rtx r;
7914 {
7915 switch (GET_CODE (r))
7916 {
7917 case CONST_INT:
7918 return INTVAL (r) >= 0;
7919
7920 case CONST_DOUBLE:
7921 if (GET_MODE (r) == VOIDmode)
7922 return CONST_DOUBLE_HIGH (r) >= 0;
7923 return 0;
7924
7925 case CONST_VECTOR:
7926 {
7927 int units, i;
7928 rtx elt;
7929
7930 units = CONST_VECTOR_NUNITS (r);
7931
7932 for (i = 0; i < units; ++i)
7933 {
7934 elt = CONST_VECTOR_ELT (r, i);
7935 if (!rtl_expr_nonnegative_p (elt))
7936 return 0;
7937 }
7938
7939 return 1;
7940 }
7941
7942 case SYMBOL_REF:
7943 case LABEL_REF:
7944 /* These are always nonnegative. */
7945 return 1;
7946
7947 default:
7948 return 0;
7949 }
7950 }
7951
7952 #include "gt-fold-const.h"