1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26    @@ warn if precision etc. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
29
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type.
32
33 fold takes a tree as argument and returns a simplified tree.
34
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
38
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
41
42 force_fit_type takes a constant and prior overflow indicator, and
43 forces the value to fit the type. It returns an overflow indicator. */
44
45 #include "config.h"
46 #include "system.h"
47 #include "coretypes.h"
48 #include "tm.h"
49 #include "flags.h"
50 #include "tree.h"
51 #include "real.h"
52 #include "rtl.h"
53 #include "expr.h"
54 #include "tm_p.h"
55 #include "toplev.h"
56 #include "ggc.h"
57 #include "hashtab.h"
58 #include "langhooks.h"
59 #include "md5.h"
60
61 /* The following constants represent a bit based encoding of GCC's
62 comparison operators. This encoding simplifies transformations
63 on relational comparison operators, such as AND and OR. */
64 enum comparison_code {
65 COMPCODE_FALSE = 0,
66 COMPCODE_LT = 1,
67 COMPCODE_EQ = 2,
68 COMPCODE_LE = 3,
69 COMPCODE_GT = 4,
70 COMPCODE_LTGT = 5,
71 COMPCODE_GE = 6,
72 COMPCODE_ORD = 7,
73 COMPCODE_UNORD = 8,
74 COMPCODE_UNLT = 9,
75 COMPCODE_UNEQ = 10,
76 COMPCODE_UNLE = 11,
77 COMPCODE_UNGT = 12,
78 COMPCODE_NE = 13,
79 COMPCODE_UNGE = 14,
80 COMPCODE_TRUE = 15
81 };
82
83 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
84 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
85 static bool negate_mathfn_p (enum built_in_function);
86 static bool negate_expr_p (tree);
87 static tree negate_expr (tree);
88 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
89 static tree associate_trees (tree, tree, enum tree_code, tree);
90 static tree const_binop (enum tree_code, tree, tree, int);
91 static hashval_t size_htab_hash (const void *);
92 static int size_htab_eq (const void *, const void *);
93 static tree fold_convert_const (enum tree_code, tree, tree);
94 static enum tree_code invert_tree_comparison (enum tree_code, bool);
95 static enum comparison_code comparison_to_compcode (enum tree_code);
96 static enum tree_code compcode_to_comparison (enum comparison_code);
97 static tree combine_comparisons (enum tree_code, enum tree_code,
98 enum tree_code, tree, tree, tree);
99 static int truth_value_p (enum tree_code);
100 static int operand_equal_for_comparison_p (tree, tree, tree);
101 static int twoval_comparison_p (tree, tree *, tree *, int *);
102 static tree eval_subst (tree, tree, tree, tree, tree);
103 static tree pedantic_omit_one_operand (tree, tree, tree);
104 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
105 static tree make_bit_field_ref (tree, tree, int, int, int);
106 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
107 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
108 enum machine_mode *, int *, int *,
109 tree *, tree *);
110 static int all_ones_mask_p (tree, int);
111 static tree sign_bit_p (tree, tree);
112 static int simple_operand_p (tree);
113 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
114 static tree make_range (tree, int *, tree *, tree *);
115 static tree build_range_check (tree, tree, int, tree, tree);
116 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
117 tree);
118 static tree fold_range_test (tree);
119 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
120 static tree unextend (tree, int, int, tree);
121 static tree fold_truthop (enum tree_code, tree, tree, tree);
122 static tree optimize_minmax_comparison (tree);
123 static tree extract_muldiv (tree, tree, enum tree_code, tree);
124 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
125 static int multiple_of_p (tree, tree, tree);
126 static tree constant_boolean_node (int, tree);
127 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
128 tree, int);
129 static bool fold_real_zero_addition_p (tree, tree, int);
130 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
131 tree, tree, tree);
132 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
133 static tree fold_div_compare (enum tree_code, tree, tree, tree);
134 static bool reorder_operands_p (tree, tree);
135 static tree fold_negate_const (tree, tree);
136 static tree fold_not_const (tree, tree);
137 static tree fold_relational_const (enum tree_code, tree, tree, tree);
138 static tree fold_relational_hi_lo (enum tree_code *, const tree,
139 tree *, tree *);
140
141 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
142 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
143 and SUM1. Then this yields nonzero if overflow occurred during the
144 addition.
145
146 Overflow occurs if A and B have the same sign, but A and SUM differ in
147 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
148 sign. */
149 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
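/* Editor's illustrative sketch, not part of fold-const.c: a standalone
   check of the OVERFLOW_SUM_SIGN test, assuming HOST_WIDE_INT is a
   64-bit signed type (the demo_* name is hypothetical).  Overflow is
   flagged exactly when the operands agree in sign but the sum does not.  */

#include <stdint.h>

static int demo_overflow_sum_sign (int64_t a, int64_t b)
{
  /* Compute the wrapped sum without signed-overflow UB.  */
  int64_t sum = (int64_t) ((uint64_t) a + (uint64_t) b);
  return (~(a ^ b) & (a ^ sum)) < 0;
}

/* demo_overflow_sum_sign (INT64_MAX, 1) != 0: both positive, sum wraps
   negative.  demo_overflow_sum_sign (-1, 1) == 0: differing signs can
   never overflow.  */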
150 \f
151 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
152 We do that by representing the two-word integer in 4 words, with only
153 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
154 number. The value of the word is LOWPART + HIGHPART * BASE. */
155
156 #define LOWPART(x) \
157 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
158 #define HIGHPART(x) \
159 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
160 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
161
162 /* Unpack a two-word integer into 4 words.
163 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
164 WORDS points to the array of HOST_WIDE_INTs. */
165
166 static void
167 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
168 {
169 words[0] = LOWPART (low);
170 words[1] = HIGHPART (low);
171 words[2] = LOWPART (hi);
172 words[3] = HIGHPART (hi);
173 }
174
175 /* Pack an array of 4 words into a two-word integer.
176 WORDS points to the array of words.
177 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
178
179 static void
180 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
181 HOST_WIDE_INT *hi)
182 {
183 *low = words[0] + words[1] * BASE;
184 *hi = words[2] + words[3] * BASE;
185 }
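/* Editor's illustrative sketch, not part of fold-const.c: the
   encode/decode round trip, assuming HOST_WIDE_INT is int64_t so that
   BASE is 2^32.  The 128-bit value (LOW, HI) becomes four nonnegative
   32-bit half-words, and decoding reassembles the identical pair.  */

#include <stdint.h>
#include <assert.h>

static void demo_encode_decode (void)
{
  uint64_t low = 0x123456789abcdef0ULL;
  int64_t hi = -42;
  int64_t w[4];

  w[0] = low & 0xffffffffULL;                 /* LOWPART (low)  */
  w[1] = low >> 32;                           /* HIGHPART (low) */
  w[2] = (uint64_t) hi & 0xffffffffULL;       /* LOWPART (hi)   */
  w[3] = (uint64_t) hi >> 32;                 /* HIGHPART (hi)  */

  uint64_t low2 = (uint64_t) w[0] + ((uint64_t) w[1] << 32);
  int64_t hi2 = (int64_t) ((uint64_t) w[2] + ((uint64_t) w[3] << 32));

  assert (low2 == low && hi2 == hi);
}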
186 \f
187 /* Make the integer constant T valid for its type by setting to 0 or 1 all
188 the bits in the constant that don't belong in the type.
189
190 Return 1 if a signed overflow occurs, 0 otherwise. If OVERFLOW is
191 nonzero, a signed overflow has already occurred in calculating T, so
192 propagate it. */
193
194 int
195 force_fit_type (tree t, int overflow)
196 {
197 unsigned HOST_WIDE_INT low;
198 HOST_WIDE_INT high;
199 unsigned int prec;
200
201 if (TREE_CODE (t) == REAL_CST)
202 {
203 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
204 Consider doing it via real_convert now. */
205 return overflow;
206 }
207
208 else if (TREE_CODE (t) != INTEGER_CST)
209 return overflow;
210
211 low = TREE_INT_CST_LOW (t);
212 high = TREE_INT_CST_HIGH (t);
213
214 if (POINTER_TYPE_P (TREE_TYPE (t))
215 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
216 prec = POINTER_SIZE;
217 else
218 prec = TYPE_PRECISION (TREE_TYPE (t));
219
220 /* First clear all bits that are beyond the type's precision. */
221
222 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
223 ;
224 else if (prec > HOST_BITS_PER_WIDE_INT)
225 TREE_INT_CST_HIGH (t)
226 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
227 else
228 {
229 TREE_INT_CST_HIGH (t) = 0;
230 if (prec < HOST_BITS_PER_WIDE_INT)
231 TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
232 }
233
234 /* Unsigned types do not suffer sign extension or overflow unless they
235    are sizetypes.  */
236 if (TYPE_UNSIGNED (TREE_TYPE (t))
237 && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
238 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
239 return overflow;
240
241 /* If the value's sign bit is set, extend the sign. */
242 if (prec != 2 * HOST_BITS_PER_WIDE_INT
243 && (prec > HOST_BITS_PER_WIDE_INT
244 ? 0 != (TREE_INT_CST_HIGH (t)
245 & ((HOST_WIDE_INT) 1
246 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
247 : 0 != (TREE_INT_CST_LOW (t)
248 & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
249 {
250 /* Value is negative:
251 set to 1 all the bits that are outside this type's precision. */
252 if (prec > HOST_BITS_PER_WIDE_INT)
253 TREE_INT_CST_HIGH (t)
254 |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
255 else
256 {
257 TREE_INT_CST_HIGH (t) = -1;
258 if (prec < HOST_BITS_PER_WIDE_INT)
259 TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
260 }
261 }
262
263 /* Return nonzero if signed overflow occurred. */
264 return
265 ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
266 != 0);
267 }
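/* Editor's illustrative sketch, not part of fold-const.c: the truncate-
   then-sign-extend behavior of force_fit_type, reduced to the low word
   alone, assuming a signed type of precision 1 <= PREC <= 64 and a
   64-bit HOST_WIDE_INT (demo_fit_signed is hypothetical).  The return
   value mirrors the overflow test: did any stored bit change?  */

#include <stdint.h>

static int demo_fit_signed (uint64_t *low, unsigned prec)
{
  uint64_t orig = *low;

  if (prec < 64)
    {
      *low &= ~(~(uint64_t) 0 << prec);        /* clear bits beyond PREC */
      if (*low & ((uint64_t) 1 << (prec - 1)))
        *low |= ~(uint64_t) 0 << prec;         /* value negative: extend */
    }
  return *low != orig;
}

/* With prec == 8, 0x7f is unchanged (no overflow), while 0xff becomes
   the full-width -1 and the change is reported as signed overflow.  */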
268 \f
269 /* Add two doubleword integers with doubleword result.
270 Each argument is given as two `HOST_WIDE_INT' pieces.
271 One argument is L1 and H1; the other, L2 and H2.
272 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
273
274 int
275 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
276 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
277 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
278 {
279 unsigned HOST_WIDE_INT l;
280 HOST_WIDE_INT h;
281
282 l = l1 + l2;
283 h = h1 + h2 + (l < l1);
284
285 *lv = l;
286 *hv = h;
287 return OVERFLOW_SUM_SIGN (h1, h2, h);
288 }
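/* Editor's illustrative sketch, not part of fold-const.c: the carry
   test in add_double, assuming 64-bit words.  Unsigned addition wraps,
   so (l < l1) after l = l1 + l2 is precisely the carry out of the low
   word into the high word.  */

#include <stdint.h>
#include <assert.h>

static void demo_add_double_carry (void)
{
  uint64_t l1 = ~(uint64_t) 0;          /* low word all ones */
  uint64_t l2 = 1;
  uint64_t l = l1 + l2;                 /* wraps around to 0 */

  assert (l < l1);                      /* carry into the high word */
}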
289
290 /* Negate a doubleword integer with doubleword result.
291 Return nonzero if the operation overflows, assuming it's signed.
292 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
293 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
294
295 int
296 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
297 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
298 {
299 if (l1 == 0)
300 {
301 *lv = 0;
302 *hv = - h1;
303 return (*hv & h1) < 0;
304 }
305 else
306 {
307 *lv = -l1;
308 *hv = ~h1;
309 return 0;
310 }
311 }
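/* Editor's illustrative sketch, not part of fold-const.c: why
   neg_double splits on l1 == 0.  Negation is ~X + 1; when the low word
   is zero the +1 carries into the high word (giving -h1), otherwise
   the carry is absorbed low (giving ~h1).  The only signed overflow is
   negating the most negative value, caught by (*hv & h1) < 0.  */

#include <stdint.h>
#include <assert.h>

static void demo_neg_double_overflow (void)
{
  int64_t h1 = INT64_MIN;                       /* most negative (0, h1) */
  int64_t hv = (int64_t) (0 - (uint64_t) h1);   /* wraps back to INT64_MIN */

  assert ((hv & h1) < 0);                       /* overflow detected */
}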
312 \f
313 /* Multiply two doubleword integers with doubleword result.
314 Return nonzero if the operation overflows, assuming it's signed.
315 Each argument is given as two `HOST_WIDE_INT' pieces.
316 One argument is L1 and H1; the other, L2 and H2.
317 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
318
319 int
320 mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
321 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
322 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
323 {
324 HOST_WIDE_INT arg1[4];
325 HOST_WIDE_INT arg2[4];
326 HOST_WIDE_INT prod[4 * 2];
327 unsigned HOST_WIDE_INT carry;
328 int i, j, k;
329 unsigned HOST_WIDE_INT toplow, neglow;
330 HOST_WIDE_INT tophigh, neghigh;
331
332 encode (arg1, l1, h1);
333 encode (arg2, l2, h2);
334
335 memset (prod, 0, sizeof prod);
336
337 for (i = 0; i < 4; i++)
338 {
339 carry = 0;
340 for (j = 0; j < 4; j++)
341 {
342 k = i + j;
343 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
344 carry += arg1[i] * arg2[j];
345 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
346 carry += prod[k];
347 prod[k] = LOWPART (carry);
348 carry = HIGHPART (carry);
349 }
350 prod[i + 4] = carry;
351 }
352
353 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
354
355 /* Check for overflow by calculating the top half of the answer in full;
356 it should agree with the low half's sign bit. */
357 decode (prod + 4, &toplow, &tophigh);
358 if (h1 < 0)
359 {
360 neg_double (l2, h2, &neglow, &neghigh);
361 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
362 }
363 if (h2 < 0)
364 {
365 neg_double (l1, h1, &neglow, &neghigh);
366 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
367 }
368 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
369 }
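/* Editor's illustrative sketch, not part of fold-const.c: the overflow
   test at the end of mul_double, scaled down to a 32x32 -> 64 bit
   multiply.  After the sign corrections, the product fits iff the top
   half is the sign extension of the bottom half: all zeros for a
   nonnegative result, all ones for a negative one.  */

#include <stdint.h>

static int demo_mul_overflows (int32_t a, int32_t b)
{
  int64_t prod = (int64_t) a * (int64_t) b;
  int32_t lo = (int32_t) prod;
  int32_t hi = (int32_t) ((uint64_t) prod >> 32);

  return hi != (lo < 0 ? -1 : 0);
}

/* demo_mul_overflows (0x10000, 0x10000) != 0, since 2^32 does not fit
   in a signed 32-bit result; demo_mul_overflows (-3, 5) == 0.  */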
370 \f
371 /* Shift the doubleword integer in L1, H1 left by COUNT places
372 keeping only PREC bits of result.
373 Shift right if COUNT is negative.
374 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
375 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
376
377 void
378 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
379 HOST_WIDE_INT count, unsigned int prec,
380 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
381 {
382 unsigned HOST_WIDE_INT signmask;
383
384 if (count < 0)
385 {
386 rshift_double (l1, h1, -count, prec, lv, hv, arith);
387 return;
388 }
389
390 if (SHIFT_COUNT_TRUNCATED)
391 count %= prec;
392
393 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
394 {
395 /* Shifting by the host word size is undefined according to the
396 ANSI standard, so we must handle this as a special case. */
397 *hv = 0;
398 *lv = 0;
399 }
400 else if (count >= HOST_BITS_PER_WIDE_INT)
401 {
402 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
403 *lv = 0;
404 }
405 else
406 {
407 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
408 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
409 *lv = l1 << count;
410 }
411
412 /* Sign extend all bits that are beyond the precision. */
413
414 signmask = -((prec > HOST_BITS_PER_WIDE_INT
415 ? ((unsigned HOST_WIDE_INT) *hv
416 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
417 : (*lv >> (prec - 1))) & 1);
418
419 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
420 ;
421 else if (prec >= HOST_BITS_PER_WIDE_INT)
422 {
423 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
424 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
425 }
426 else
427 {
428 *hv = signmask;
429 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
430 *lv |= signmask << prec;
431 }
432 }
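/* Editor's illustrative sketch, not part of fold-const.c: the
   double-shift idiom above.  A direct l1 >> (64 - count) would shift
   by the full word width when COUNT is 0, which C leaves undefined;
   shifting by (64 - count - 1) and then by 1 more is always defined
   and yields the same cross-word bits.  64-bit words assumed.  */

#include <stdint.h>
#include <assert.h>

static void demo_cross_word_bits (void)
{
  uint64_t l1 = 0x8000000000000000ULL;
  int count = 0;
  uint64_t cross = l1 >> (64 - count - 1) >> 1;

  assert (cross == 0);          /* COUNT == 0: nothing crosses over */

  count = 1;
  cross = l1 >> (64 - count - 1) >> 1;
  assert (cross == 1);          /* the top bit moves into the high word */
}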
433
434 /* Shift the doubleword integer in L1, H1 right by COUNT places
435 keeping only PREC bits of result. COUNT must be positive.
436 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
437 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
438
439 void
440 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
441 HOST_WIDE_INT count, unsigned int prec,
442 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
443 int arith)
444 {
445 unsigned HOST_WIDE_INT signmask;
446
447 signmask = (arith
448 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
449 : 0);
450
451 if (SHIFT_COUNT_TRUNCATED)
452 count %= prec;
453
454 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
455 {
456 /* Shifting by the host word size is undefined according to the
457 ANSI standard, so we must handle this as a special case. */
458 *hv = 0;
459 *lv = 0;
460 }
461 else if (count >= HOST_BITS_PER_WIDE_INT)
462 {
463 *hv = 0;
464 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
465 }
466 else
467 {
468 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
469 *lv = ((l1 >> count)
470 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
471 }
472
473 /* Zero / sign extend all bits that are beyond the precision. */
474
475 if (count >= (HOST_WIDE_INT)prec)
476 {
477 *hv = signmask;
478 *lv = signmask;
479 }
480 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
481 ;
482 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
483 {
484 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
485 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
486 }
487 else
488 {
489 *hv = signmask;
490 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
491 *lv |= signmask << (prec - count);
492 }
493 }
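/* Editor's illustrative sketch, not part of fold-const.c: the signmask
   that distinguishes arithmetic from logical right shifts, assuming
   64-bit words.  Negating the high word's top bit yields an all-ones
   mask for negative values and zero otherwise; ORing it in replicates
   the sign across the vacated positions.  */

#include <stdint.h>
#include <assert.h>

static void demo_signmask (void)
{
  int64_t h1 = -5;
  uint64_t mask = -((uint64_t) h1 >> 63);
  assert (mask == ~(uint64_t) 0);       /* arithmetic shift of a negative */

  h1 = 5;
  mask = -((uint64_t) h1 >> 63);
  assert (mask == 0);                   /* nonnegative: same as logical */
}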
494 \f
495 /* Rotate the doubleword integer in L1, H1 left by COUNT places
496 keeping only PREC bits of result.
497 Rotate right if COUNT is negative.
498 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
499
500 void
501 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
502 HOST_WIDE_INT count, unsigned int prec,
503 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
504 {
505 unsigned HOST_WIDE_INT s1l, s2l;
506 HOST_WIDE_INT s1h, s2h;
507
508 count %= prec;
509 if (count < 0)
510 count += prec;
511
512 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
513 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
514 *lv = s1l | s2l;
515 *hv = s1h | s2h;
516 }
517
518 /* Rotate the doubleword integer in L1, H1 right by COUNT places
519 keeping only PREC bits of result. COUNT must be positive.
520 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
521
522 void
523 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
524 HOST_WIDE_INT count, unsigned int prec,
525 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
526 {
527 unsigned HOST_WIDE_INT s1l, s2l;
528 HOST_WIDE_INT s1h, s2h;
529
530 count %= prec;
531 if (count < 0)
532 count += prec;
533
534 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
535 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
536 *lv = s1l | s2l;
537 *hv = s1h | s2h;
538 }
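/* Editor's illustrative sketch, not part of fold-const.c: a rotate is
   the OR of two complementary shifts, shown for a single 64-bit word
   with 0 < COUNT < 64 (the functions above first reduce COUNT modulo
   PREC, so the complementary shift counts stay in range).  */

#include <stdint.h>

static uint64_t demo_rotl64 (uint64_t x, unsigned count)
{
  return (x << count) | (x >> (64 - count));
}

/* demo_rotl64 (0x8000000000000001ULL, 1) == 3: both end bits wrap.  */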
539 \f
540 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
541 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
542 CODE is a tree code for a kind of division, one of
543 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
544 or EXACT_DIV_EXPR
545 It controls how the quotient is rounded to an integer.
546 Return nonzero if the operation overflows.
547 UNS nonzero says do unsigned division. */
548
549 int
550 div_and_round_double (enum tree_code code, int uns,
551 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
552 HOST_WIDE_INT hnum_orig,
553 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
554 HOST_WIDE_INT hden_orig,
555 unsigned HOST_WIDE_INT *lquo,
556 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
557 HOST_WIDE_INT *hrem)
558 {
559 int quo_neg = 0;
560 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
561 HOST_WIDE_INT den[4], quo[4];
562 int i, j;
563 unsigned HOST_WIDE_INT work;
564 unsigned HOST_WIDE_INT carry = 0;
565 unsigned HOST_WIDE_INT lnum = lnum_orig;
566 HOST_WIDE_INT hnum = hnum_orig;
567 unsigned HOST_WIDE_INT lden = lden_orig;
568 HOST_WIDE_INT hden = hden_orig;
569 int overflow = 0;
570
571 if (hden == 0 && lden == 0)
572 overflow = 1, lden = 1;
573
574 /* Calculate quotient sign and convert operands to unsigned. */
575 if (!uns)
576 {
577 if (hnum < 0)
578 {
579 quo_neg = ~ quo_neg;
580 /* (minimum integer) / (-1) is the only overflow case. */
581 if (neg_double (lnum, hnum, &lnum, &hnum)
582 && ((HOST_WIDE_INT) lden & hden) == -1)
583 overflow = 1;
584 }
585 if (hden < 0)
586 {
587 quo_neg = ~ quo_neg;
588 neg_double (lden, hden, &lden, &hden);
589 }
590 }
591
592 if (hnum == 0 && hden == 0)
593 { /* single precision */
594 *hquo = *hrem = 0;
595 /* This unsigned division rounds toward zero. */
596 *lquo = lnum / lden;
597 goto finish_up;
598 }
599
600 if (hnum == 0)
601 { /* trivial case: dividend < divisor */
602 /* hden != 0 already checked. */
603 *hquo = *lquo = 0;
604 *hrem = hnum;
605 *lrem = lnum;
606 goto finish_up;
607 }
608
609 memset (quo, 0, sizeof quo);
610
611   memset (num, 0, sizeof num);  /* to zero the extra scaling element */
612 memset (den, 0, sizeof den);
613
614 encode (num, lnum, hnum);
615 encode (den, lden, hden);
616
617 /* Special code for when the divisor < BASE. */
618 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
619 {
620 /* hnum != 0 already checked. */
621 for (i = 4 - 1; i >= 0; i--)
622 {
623 work = num[i] + carry * BASE;
624 quo[i] = work / lden;
625 carry = work % lden;
626 }
627 }
628 else
629 {
630 /* Full double precision division,
631 with thanks to Don Knuth's "Seminumerical Algorithms". */
632 int num_hi_sig, den_hi_sig;
633 unsigned HOST_WIDE_INT quo_est, scale;
634
635 /* Find the highest nonzero divisor digit. */
636 for (i = 4 - 1;; i--)
637 if (den[i] != 0)
638 {
639 den_hi_sig = i;
640 break;
641 }
642
643      Ensure that the first digit of the divisor is at least BASE/2.
644 This is required by the quotient digit estimation algorithm. */
645
646 scale = BASE / (den[den_hi_sig] + 1);
647 if (scale > 1)
648 { /* scale divisor and dividend */
649 carry = 0;
650 for (i = 0; i <= 4 - 1; i++)
651 {
652 work = (num[i] * scale) + carry;
653 num[i] = LOWPART (work);
654 carry = HIGHPART (work);
655 }
656
657 num[4] = carry;
658 carry = 0;
659 for (i = 0; i <= 4 - 1; i++)
660 {
661 work = (den[i] * scale) + carry;
662 den[i] = LOWPART (work);
663 carry = HIGHPART (work);
664 if (den[i] != 0) den_hi_sig = i;
665 }
666 }
667
668 num_hi_sig = 4;
669
670 /* Main loop */
671 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
672 {
673 /* Guess the next quotient digit, quo_est, by dividing the first
674 two remaining dividend digits by the high order quotient digit.
675 quo_est is never low and is at most 2 high. */
676 unsigned HOST_WIDE_INT tmp;
677
678 num_hi_sig = i + den_hi_sig + 1;
679 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
680 if (num[num_hi_sig] != den[den_hi_sig])
681 quo_est = work / den[den_hi_sig];
682 else
683 quo_est = BASE - 1;
684
685 /* Refine quo_est so it's usually correct, and at most one high. */
686 tmp = work - quo_est * den[den_hi_sig];
687 if (tmp < BASE
688 && (den[den_hi_sig - 1] * quo_est
689 > (tmp * BASE + num[num_hi_sig - 2])))
690 quo_est--;
691
692 /* Try QUO_EST as the quotient digit, by multiplying the
693 divisor by QUO_EST and subtracting from the remaining dividend.
694 Keep in mind that QUO_EST is the I - 1st digit. */
695
696 carry = 0;
697 for (j = 0; j <= den_hi_sig; j++)
698 {
699 work = quo_est * den[j] + carry;
700 carry = HIGHPART (work);
701 work = num[i + j] - LOWPART (work);
702 num[i + j] = LOWPART (work);
703 carry += HIGHPART (work) != 0;
704 }
705
706 /* If quo_est was high by one, then num[i] went negative and
707 we need to correct things. */
708 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
709 {
710 quo_est--;
711 carry = 0; /* add divisor back in */
712 for (j = 0; j <= den_hi_sig; j++)
713 {
714 work = num[i + j] + den[j] + carry;
715 carry = HIGHPART (work);
716 num[i + j] = LOWPART (work);
717 }
718
719 num [num_hi_sig] += carry;
720 }
721
722 /* Store the quotient digit. */
723 quo[i] = quo_est;
724 }
725 }
726
727 decode (quo, lquo, hquo);
728
729 finish_up:
730 /* If result is negative, make it so. */
731 if (quo_neg)
732 neg_double (*lquo, *hquo, lquo, hquo);
733
734 /* Compute trial remainder: rem = num - (quo * den) */
735 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
736 neg_double (*lrem, *hrem, lrem, hrem);
737 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
738
739 switch (code)
740 {
741 case TRUNC_DIV_EXPR:
742 case TRUNC_MOD_EXPR: /* round toward zero */
743 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
744 return overflow;
745
746 case FLOOR_DIV_EXPR:
747 case FLOOR_MOD_EXPR: /* round toward negative infinity */
748 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
749 {
750 /* quo = quo - 1; */
751 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
752 lquo, hquo);
753 }
754 else
755 return overflow;
756 break;
757
758 case CEIL_DIV_EXPR:
759 case CEIL_MOD_EXPR: /* round toward positive infinity */
760 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
761 {
762 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
763 lquo, hquo);
764 }
765 else
766 return overflow;
767 break;
768
769 case ROUND_DIV_EXPR:
770 case ROUND_MOD_EXPR: /* round to closest integer */
771 {
772 unsigned HOST_WIDE_INT labs_rem = *lrem;
773 HOST_WIDE_INT habs_rem = *hrem;
774 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
775 HOST_WIDE_INT habs_den = hden, htwice;
776
777 /* Get absolute values. */
778 if (*hrem < 0)
779 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
780 if (hden < 0)
781 neg_double (lden, hden, &labs_den, &habs_den);
782
783 /* If (2 * abs (lrem) >= abs (lden)) */
784 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
785 labs_rem, habs_rem, &ltwice, &htwice);
786
787 if (((unsigned HOST_WIDE_INT) habs_den
788 < (unsigned HOST_WIDE_INT) htwice)
789 || (((unsigned HOST_WIDE_INT) habs_den
790 == (unsigned HOST_WIDE_INT) htwice)
791 && (labs_den < ltwice)))
792 {
793 if (*hquo < 0)
794 /* quo = quo - 1; */
795 add_double (*lquo, *hquo,
796 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
797 else
798 /* quo = quo + 1; */
799 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
800 lquo, hquo);
801 }
802 else
803 return overflow;
804 }
805 break;
806
807 default:
808 abort ();
809 }
810
811 /* Compute true remainder: rem = num - (quo * den) */
812 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
813 neg_double (*lrem, *hrem, lrem, hrem);
814 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
815 return overflow;
816 }
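/* Editor's illustrative sketch, not part of fold-const.c: the four
   rounding adjustments applied to a truncating quotient, shown with
   single 64-bit operands (demo_* is hypothetical).  C's `/' truncates
   toward zero; FLOOR steps down for a negative inexact quotient, CEIL
   steps up for a positive one, and ROUND steps away from zero when
   2 * |rem| >= |den|.  */

#include <stdint.h>
#include <assert.h>

static void demo_rounded_division (void)
{
  int64_t num = -7, den = 2;
  int64_t quo = num / den;                      /* -3: trunc toward 0 */
  int64_t rem = num % den;                      /* -1 */
  int neg = (num < 0) != (den < 0);             /* quotient negative? */

  int64_t floor_quo = quo - (rem != 0 && neg);          /* -4 */
  int64_t ceil_quo = quo + (rem != 0 && !neg);          /* -3 */
  int64_t round_quo = quo;
  if (2 * (rem < 0 ? -rem : rem) >= (den < 0 ? -den : den))
    round_quo += neg ? -1 : 1;                          /* -4 */

  assert (quo == -3 && floor_quo == -4
          && ceil_quo == -3 && round_quo == -4);
}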
817 \f
818 /* Return true if built-in mathematical function specified by CODE
819    preserves the sign of its argument, i.e. -f(x) == f(-x).  */
820
821 static bool
822 negate_mathfn_p (enum built_in_function code)
823 {
824 switch (code)
825 {
826 case BUILT_IN_ASIN:
827 case BUILT_IN_ASINF:
828 case BUILT_IN_ASINL:
829 case BUILT_IN_ATAN:
830 case BUILT_IN_ATANF:
831 case BUILT_IN_ATANL:
832 case BUILT_IN_SIN:
833 case BUILT_IN_SINF:
834 case BUILT_IN_SINL:
835 case BUILT_IN_TAN:
836 case BUILT_IN_TANF:
837 case BUILT_IN_TANL:
838 return true;
839
840 default:
841 break;
842 }
843 return false;
844 }
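/* Editor's illustrative sketch, not part of fold-const.c: every
   function listed above is odd, i.e. f(-x) == -f(x), which is what
   justifies rewriting -f(x) as f(-x) further below.  The identity is
   exact mathematically; common IEEE math libraries preserve it
   bit-for-bit, though the C standard does not promise that.  */

#include <math.h>
#include <stdio.h>

static void demo_odd_function (void)
{
  double x = 0.5;
  printf ("sin(-x) = %.17g, -sin(x) = %.17g\n", sin (-x), -sin (x));
}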
845
846 /* Determine whether an expression T can be cheaply negated using
847 the function negate_expr. */
848
849 static bool
850 negate_expr_p (tree t)
851 {
852 unsigned HOST_WIDE_INT val;
853 unsigned int prec;
854 tree type;
855
856 if (t == 0)
857 return false;
858
859 type = TREE_TYPE (t);
860
861 STRIP_SIGN_NOPS (t);
862 switch (TREE_CODE (t))
863 {
864 case INTEGER_CST:
865 if (TYPE_UNSIGNED (type) || ! flag_trapv)
866 return true;
867
868 /* Check that -CST will not overflow type. */
869 prec = TYPE_PRECISION (type);
870 if (prec > HOST_BITS_PER_WIDE_INT)
871 {
872 if (TREE_INT_CST_LOW (t) != 0)
873 return true;
874 prec -= HOST_BITS_PER_WIDE_INT;
875 val = TREE_INT_CST_HIGH (t);
876 }
877 else
878 val = TREE_INT_CST_LOW (t);
879 if (prec < HOST_BITS_PER_WIDE_INT)
880 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
881 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
882
883 case REAL_CST:
884 case NEGATE_EXPR:
885 return true;
886
887 case COMPLEX_CST:
888 return negate_expr_p (TREE_REALPART (t))
889 && negate_expr_p (TREE_IMAGPART (t));
890
891 case PLUS_EXPR:
892 if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
893 return false;
894 /* -(A + B) -> (-B) - A. */
895 if (negate_expr_p (TREE_OPERAND (t, 1))
896 && reorder_operands_p (TREE_OPERAND (t, 0),
897 TREE_OPERAND (t, 1)))
898 return true;
899 /* -(A + B) -> (-A) - B. */
900 return negate_expr_p (TREE_OPERAND (t, 0));
901
902 case MINUS_EXPR:
903 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
904 return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
905 && reorder_operands_p (TREE_OPERAND (t, 0),
906 TREE_OPERAND (t, 1));
907
908 case MULT_EXPR:
909 if (TYPE_UNSIGNED (TREE_TYPE (t)))
910 break;
911
912 /* Fall through. */
913
914 case RDIV_EXPR:
915 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
916 return negate_expr_p (TREE_OPERAND (t, 1))
917 || negate_expr_p (TREE_OPERAND (t, 0));
918 break;
919
920 case NOP_EXPR:
921 /* Negate -((double)float) as (double)(-float). */
922 if (TREE_CODE (type) == REAL_TYPE)
923 {
924 tree tem = strip_float_extensions (t);
925 if (tem != t)
926 return negate_expr_p (tem);
927 }
928 break;
929
930 case CALL_EXPR:
931 /* Negate -f(x) as f(-x). */
932 if (negate_mathfn_p (builtin_mathfn_code (t)))
933 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
934 break;
935
936 case RSHIFT_EXPR:
937 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
938 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
939 {
940 tree op1 = TREE_OPERAND (t, 1);
941 if (TREE_INT_CST_HIGH (op1) == 0
942 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
943 == TREE_INT_CST_LOW (op1))
944 return true;
945 }
946 break;
947
948 default:
949 break;
950 }
951 return false;
952 }
953
954 /* Given T, an expression, return the negation of T. Allow for T to be
955 null, in which case return null. */
956
957 static tree
958 negate_expr (tree t)
959 {
960 tree type;
961 tree tem;
962
963 if (t == 0)
964 return 0;
965
966 type = TREE_TYPE (t);
967 STRIP_SIGN_NOPS (t);
968
969 switch (TREE_CODE (t))
970 {
971 case INTEGER_CST:
972 tem = fold_negate_const (t, type);
973 if (! TREE_OVERFLOW (tem)
974 || TYPE_UNSIGNED (type)
975 || ! flag_trapv)
976 return tem;
977 break;
978
979 case REAL_CST:
980 tem = fold_negate_const (t, type);
981 /* Two's complement FP formats, such as c4x, may overflow. */
982 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
983 return fold_convert (type, tem);
984 break;
985
986 case COMPLEX_CST:
987 {
988 tree rpart = negate_expr (TREE_REALPART (t));
989 tree ipart = negate_expr (TREE_IMAGPART (t));
990
991 if ((TREE_CODE (rpart) == REAL_CST
992 && TREE_CODE (ipart) == REAL_CST)
993 || (TREE_CODE (rpart) == INTEGER_CST
994 && TREE_CODE (ipart) == INTEGER_CST))
995 return build_complex (type, rpart, ipart);
996 }
997 break;
998
999 case NEGATE_EXPR:
1000 return fold_convert (type, TREE_OPERAND (t, 0));
1001
1002 case PLUS_EXPR:
1003 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1004 {
1005 /* -(A + B) -> (-B) - A. */
1006 if (negate_expr_p (TREE_OPERAND (t, 1))
1007 && reorder_operands_p (TREE_OPERAND (t, 0),
1008 TREE_OPERAND (t, 1)))
1009 {
1010 tem = negate_expr (TREE_OPERAND (t, 1));
1011 tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1012 tem, TREE_OPERAND (t, 0)));
1013 return fold_convert (type, tem);
1014 }
1015
1016 /* -(A + B) -> (-A) - B. */
1017 if (negate_expr_p (TREE_OPERAND (t, 0)))
1018 {
1019 tem = negate_expr (TREE_OPERAND (t, 0));
1020 tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1021 tem, TREE_OPERAND (t, 1)));
1022 return fold_convert (type, tem);
1023 }
1024 }
1025 break;
1026
1027 case MINUS_EXPR:
1028 /* - (A - B) -> B - A */
1029 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1030 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1031 return fold_convert (type,
1032 fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1033 TREE_OPERAND (t, 1),
1034 TREE_OPERAND (t, 0))));
1035 break;
1036
1037 case MULT_EXPR:
1038 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1039 break;
1040
1041 /* Fall through. */
1042
1043 case RDIV_EXPR:
1044 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1045 {
1046 tem = TREE_OPERAND (t, 1);
1047 if (negate_expr_p (tem))
1048 return fold_convert (type,
1049 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
1050 TREE_OPERAND (t, 0),
1051 negate_expr (tem))));
1052 tem = TREE_OPERAND (t, 0);
1053 if (negate_expr_p (tem))
1054 return fold_convert (type,
1055 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
1056 negate_expr (tem),
1057 TREE_OPERAND (t, 1))));
1058 }
1059 break;
1060
1061 case NOP_EXPR:
1062 /* Convert -((double)float) into (double)(-float). */
1063 if (TREE_CODE (type) == REAL_TYPE)
1064 {
1065 tem = strip_float_extensions (t);
1066 if (tem != t && negate_expr_p (tem))
1067 return fold_convert (type, negate_expr (tem));
1068 }
1069 break;
1070
1071 case CALL_EXPR:
1072 /* Negate -f(x) as f(-x). */
1073 if (negate_mathfn_p (builtin_mathfn_code (t))
1074 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1075 {
1076 tree fndecl, arg, arglist;
1077
1078 fndecl = get_callee_fndecl (t);
1079 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1080 arglist = build_tree_list (NULL_TREE, arg);
1081 return build_function_call_expr (fndecl, arglist);
1082 }
1083 break;
1084
1085 case RSHIFT_EXPR:
1086 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1087 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1088 {
1089 tree op1 = TREE_OPERAND (t, 1);
1090 if (TREE_INT_CST_HIGH (op1) == 0
1091 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1092 == TREE_INT_CST_LOW (op1))
1093 {
1094 tree ntype = TYPE_UNSIGNED (type)
1095 ? lang_hooks.types.signed_type (type)
1096 : lang_hooks.types.unsigned_type (type);
1097 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1098 temp = fold (build2 (RSHIFT_EXPR, ntype, temp, op1));
1099 return fold_convert (type, temp);
1100 }
1101 }
1102 break;
1103
1104 default:
1105 break;
1106 }
1107
1108 tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
1109 return fold_convert (type, tem);
1110 }
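/* Editor's illustrative sketch, not part of fold-const.c: the
   RSHIFT_EXPR case above, for a 32-bit int.  On targets where signed
   right shifts are arithmetic (implementation-defined in C, but what
   GCC assumes), x >> 31 is -1 or 0, so its negation equals the logical
   (unsigned) shift, i.e. the sign bit as 0 or 1.  */

#include <stdint.h>
#include <assert.h>

static void demo_negate_rshift (int32_t x)
{
  assert (-(x >> 31) == (int32_t) ((uint32_t) x >> 31));
}

/* demo_negate_rshift (-8): -(x >> 31) == -(-1) == 1 == sign bit.
   demo_negate_rshift (8):  both sides are 0.  */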
1111 \f
1112 /* Split a tree IN into constant, literal and variable parts that could be
1113 combined with CODE to make IN. "constant" means an expression with
1114 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1115 commutative arithmetic operation. Store the constant part into *CONP,
1116 the literal in *LITP and return the variable part. If a part isn't
1117 present, set it to null. If the tree does not decompose in this way,
1118 return the entire tree as the variable part and the other parts as null.
1119
1120 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1121 case, we negate an operand that was subtracted. Except if it is a
1122 literal for which we use *MINUS_LITP instead.
1123
1124 If NEGATE_P is true, we are negating all of IN, again except a literal
1125 for which we use *MINUS_LITP instead.
1126
1127 If IN is itself a literal or constant, return it as appropriate.
1128
1129 Note that we do not guarantee that any of the three values will be the
1130 same type as IN, but they will have the same signedness and mode. */
1131
1132 static tree
1133 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1134 tree *minus_litp, int negate_p)
1135 {
1136 tree var = 0;
1137
1138 *conp = 0;
1139 *litp = 0;
1140 *minus_litp = 0;
1141
1142 /* Strip any conversions that don't change the machine mode or signedness. */
1143 STRIP_SIGN_NOPS (in);
1144
1145 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1146 *litp = in;
1147 else if (TREE_CODE (in) == code
1148 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1149 /* We can associate addition and subtraction together (even
1150 though the C standard doesn't say so) for integers because
1151 the value is not affected. For reals, the value might be
1152 affected, so we can't. */
1153 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1154 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1155 {
1156 tree op0 = TREE_OPERAND (in, 0);
1157 tree op1 = TREE_OPERAND (in, 1);
1158 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1159 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1160
1161 /* First see if either of the operands is a literal, then a constant. */
1162 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1163 *litp = op0, op0 = 0;
1164 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1165 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1166
1167 if (op0 != 0 && TREE_CONSTANT (op0))
1168 *conp = op0, op0 = 0;
1169 else if (op1 != 0 && TREE_CONSTANT (op1))
1170 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1171
1172 /* If we haven't dealt with either operand, this is not a case we can
1173 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1174 if (op0 != 0 && op1 != 0)
1175 var = in;
1176 else if (op0 != 0)
1177 var = op0;
1178 else
1179 var = op1, neg_var_p = neg1_p;
1180
1181 /* Now do any needed negations. */
1182 if (neg_litp_p)
1183 *minus_litp = *litp, *litp = 0;
1184 if (neg_conp_p)
1185 *conp = negate_expr (*conp);
1186 if (neg_var_p)
1187 var = negate_expr (var);
1188 }
1189 else if (TREE_CONSTANT (in))
1190 *conp = in;
1191 else
1192 var = in;
1193
1194 if (negate_p)
1195 {
1196 if (*litp)
1197 *minus_litp = *litp, *litp = 0;
1198 else if (*minus_litp)
1199 *litp = *minus_litp, *minus_litp = 0;
1200 *conp = negate_expr (*conp);
1201 var = negate_expr (var);
1202 }
1203
1204 return var;
1205 }
1206
1207 /* Re-associate trees split by the above function. T1 and T2 are either
1208 expressions to associate or null. Return the new expression, if any. If
1209 we build an operation, do it in TYPE and with CODE. */
1210
1211 static tree
1212 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1213 {
1214 if (t1 == 0)
1215 return t2;
1216 else if (t2 == 0)
1217 return t1;
1218
1219 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1220 try to fold this since we will have infinite recursion. But do
1221 deal with any NEGATE_EXPRs. */
1222 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1223 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1224 {
1225 if (code == PLUS_EXPR)
1226 {
1227 if (TREE_CODE (t1) == NEGATE_EXPR)
1228 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1229 fold_convert (type, TREE_OPERAND (t1, 0)));
1230 else if (TREE_CODE (t2) == NEGATE_EXPR)
1231 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1232 fold_convert (type, TREE_OPERAND (t2, 0)));
1233 }
1234 return build2 (code, type, fold_convert (type, t1),
1235 fold_convert (type, t2));
1236 }
1237
1238 return fold (build2 (code, type, fold_convert (type, t1),
1239 fold_convert (type, t2)));
1240 }
1241 \f
1242 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1243 to produce a new constant.
1244
1245 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1246
1247 tree
1248 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1249 {
1250 unsigned HOST_WIDE_INT int1l, int2l;
1251 HOST_WIDE_INT int1h, int2h;
1252 unsigned HOST_WIDE_INT low;
1253 HOST_WIDE_INT hi;
1254 unsigned HOST_WIDE_INT garbagel;
1255 HOST_WIDE_INT garbageh;
1256 tree t;
1257 tree type = TREE_TYPE (arg1);
1258 int uns = TYPE_UNSIGNED (type);
1259 int is_sizetype
1260 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1261 int overflow = 0;
1262 int no_overflow = 0;
1263
1264 int1l = TREE_INT_CST_LOW (arg1);
1265 int1h = TREE_INT_CST_HIGH (arg1);
1266 int2l = TREE_INT_CST_LOW (arg2);
1267 int2h = TREE_INT_CST_HIGH (arg2);
1268
1269 switch (code)
1270 {
1271 case BIT_IOR_EXPR:
1272 low = int1l | int2l, hi = int1h | int2h;
1273 break;
1274
1275 case BIT_XOR_EXPR:
1276 low = int1l ^ int2l, hi = int1h ^ int2h;
1277 break;
1278
1279 case BIT_AND_EXPR:
1280 low = int1l & int2l, hi = int1h & int2h;
1281 break;
1282
1283 case RSHIFT_EXPR:
1284 int2l = -int2l;
1285 case LSHIFT_EXPR:
1286 /* It's unclear from the C standard whether shifts can overflow.
1287 The following code ignores overflow; perhaps a C standard
1288 interpretation ruling is needed. */
1289 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1290 &low, &hi, !uns);
1291 no_overflow = 1;
1292 break;
1293
1294 case RROTATE_EXPR:
1295 int2l = - int2l;
1296 case LROTATE_EXPR:
1297 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1298 &low, &hi);
1299 break;
1300
1301 case PLUS_EXPR:
1302 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1303 break;
1304
1305 case MINUS_EXPR:
1306 neg_double (int2l, int2h, &low, &hi);
1307 add_double (int1l, int1h, low, hi, &low, &hi);
1308 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1309 break;
1310
1311 case MULT_EXPR:
1312 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1313 break;
1314
1315 case TRUNC_DIV_EXPR:
1316 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1317 case EXACT_DIV_EXPR:
1318 /* This is a shortcut for a common special case. */
1319 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1320 && ! TREE_CONSTANT_OVERFLOW (arg1)
1321 && ! TREE_CONSTANT_OVERFLOW (arg2)
1322 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1323 {
1324 if (code == CEIL_DIV_EXPR)
1325 int1l += int2l - 1;
1326
1327 low = int1l / int2l, hi = 0;
1328 break;
1329 }
1330
1331 /* ... fall through ... */
1332
1333 case ROUND_DIV_EXPR:
1334 if (int2h == 0 && int2l == 1)
1335 {
1336 low = int1l, hi = int1h;
1337 break;
1338 }
1339 if (int1l == int2l && int1h == int2h
1340 && ! (int1l == 0 && int1h == 0))
1341 {
1342 low = 1, hi = 0;
1343 break;
1344 }
1345 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1346 &low, &hi, &garbagel, &garbageh);
1347 break;
1348
1349 case TRUNC_MOD_EXPR:
1350 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1351 /* This is a shortcut for a common special case. */
1352 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1353 && ! TREE_CONSTANT_OVERFLOW (arg1)
1354 && ! TREE_CONSTANT_OVERFLOW (arg2)
1355 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1356 {
1357 if (code == CEIL_MOD_EXPR)
1358 int1l += int2l - 1;
1359 low = int1l % int2l, hi = 0;
1360 break;
1361 }
1362
1363 /* ... fall through ... */
1364
1365 case ROUND_MOD_EXPR:
1366 overflow = div_and_round_double (code, uns,
1367 int1l, int1h, int2l, int2h,
1368 &garbagel, &garbageh, &low, &hi);
1369 break;
1370
1371 case MIN_EXPR:
1372 case MAX_EXPR:
1373 if (uns)
1374 low = (((unsigned HOST_WIDE_INT) int1h
1375 < (unsigned HOST_WIDE_INT) int2h)
1376 || (((unsigned HOST_WIDE_INT) int1h
1377 == (unsigned HOST_WIDE_INT) int2h)
1378 && int1l < int2l));
1379 else
1380 low = (int1h < int2h
1381 || (int1h == int2h && int1l < int2l));
1382
1383 if (low == (code == MIN_EXPR))
1384 low = int1l, hi = int1h;
1385 else
1386 low = int2l, hi = int2h;
1387 break;
1388
1389 default:
1390 abort ();
1391 }
1392
1393   /* If this is for a sizetype, the result fits in one (signed)
1394      HOST_WIDE_INT word, and there is no overflow, use size_int since it
1395      caches constants.  */
1396 if (is_sizetype
1397 && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
1398 || (hi == -1 && (HOST_WIDE_INT) low < 0))
1399 && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
1400 return size_int_type_wide (low, type);
1401 else
1402 {
1403 t = build_int_2 (low, hi);
1404 TREE_TYPE (t) = TREE_TYPE (arg1);
1405 }
1406
1407 TREE_OVERFLOW (t)
1408 = ((notrunc
1409 ? (!uns || is_sizetype) && overflow
1410 : (force_fit_type (t, (!uns || is_sizetype) && overflow)
1411 && ! no_overflow))
1412 | TREE_OVERFLOW (arg1)
1413 | TREE_OVERFLOW (arg2));
1414
1415 /* If we're doing a size calculation, unsigned arithmetic does overflow.
1416 So check if force_fit_type truncated the value. */
1417 if (is_sizetype
1418 && ! TREE_OVERFLOW (t)
1419 && (TREE_INT_CST_HIGH (t) != hi
1420 || TREE_INT_CST_LOW (t) != low))
1421 TREE_OVERFLOW (t) = 1;
1422
1423 TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
1424 | TREE_CONSTANT_OVERFLOW (arg1)
1425 | TREE_CONSTANT_OVERFLOW (arg2));
1426 return t;
1427 }
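/* Editor's illustrative sketch, not part of fold-const.c: the
   MINUS_EXPR overflow test above.  Since a - b = r means a = b + r,
   the subtraction overflows exactly when that addition would, which
   is OVERFLOW_SUM_SIGN (r, b, a) on the high words.  Single 64-bit
   words assumed here.  */

#include <stdint.h>

static int demo_sub_overflows (int64_t a, int64_t b)
{
  int64_t r = (int64_t) ((uint64_t) a - (uint64_t) b);
  return (~(r ^ b) & (r ^ a)) < 0;
}

/* demo_sub_overflows (INT64_MIN, 1) != 0; demo_sub_overflows (0, 1) == 0.  */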
1428
1429 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1430 constant. We assume ARG1 and ARG2 have the same data type, or at least
1431 are the same kind of constant and the same machine mode.
1432
1433 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1434
1435 static tree
1436 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1437 {
1438 STRIP_NOPS (arg1);
1439 STRIP_NOPS (arg2);
1440
1441 if (TREE_CODE (arg1) == INTEGER_CST)
1442 return int_const_binop (code, arg1, arg2, notrunc);
1443
1444 if (TREE_CODE (arg1) == REAL_CST)
1445 {
1446 enum machine_mode mode;
1447 REAL_VALUE_TYPE d1;
1448 REAL_VALUE_TYPE d2;
1449 REAL_VALUE_TYPE value;
1450 tree t, type;
1451
1452 d1 = TREE_REAL_CST (arg1);
1453 d2 = TREE_REAL_CST (arg2);
1454
1455 type = TREE_TYPE (arg1);
1456 mode = TYPE_MODE (type);
1457
1458 /* Don't perform operation if we honor signaling NaNs and
1459 either operand is a NaN. */
1460 if (HONOR_SNANS (mode)
1461 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1462 return NULL_TREE;
1463
1464 /* Don't perform operation if it would raise a division
1465 by zero exception. */
1466 if (code == RDIV_EXPR
1467 && REAL_VALUES_EQUAL (d2, dconst0)
1468 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1469 return NULL_TREE;
1470
1471 /* If either operand is a NaN, just return it. Otherwise, set up
1472 for floating-point trap; we return an overflow. */
1473 if (REAL_VALUE_ISNAN (d1))
1474 return arg1;
1475 else if (REAL_VALUE_ISNAN (d2))
1476 return arg2;
1477
1478 REAL_ARITHMETIC (value, code, d1, d2);
1479
1480 t = build_real (type, real_value_truncate (mode, value));
1481
1482 TREE_OVERFLOW (t)
1483 = (force_fit_type (t, 0)
1484 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1485 TREE_CONSTANT_OVERFLOW (t)
1486 = TREE_OVERFLOW (t)
1487 | TREE_CONSTANT_OVERFLOW (arg1)
1488 | TREE_CONSTANT_OVERFLOW (arg2);
1489 return t;
1490 }
1491 if (TREE_CODE (arg1) == COMPLEX_CST)
1492 {
1493 tree type = TREE_TYPE (arg1);
1494 tree r1 = TREE_REALPART (arg1);
1495 tree i1 = TREE_IMAGPART (arg1);
1496 tree r2 = TREE_REALPART (arg2);
1497 tree i2 = TREE_IMAGPART (arg2);
1498 tree t;
1499
1500 switch (code)
1501 {
1502 case PLUS_EXPR:
1503 t = build_complex (type,
1504 const_binop (PLUS_EXPR, r1, r2, notrunc),
1505 const_binop (PLUS_EXPR, i1, i2, notrunc));
1506 break;
1507
1508 case MINUS_EXPR:
1509 t = build_complex (type,
1510 const_binop (MINUS_EXPR, r1, r2, notrunc),
1511 const_binop (MINUS_EXPR, i1, i2, notrunc));
1512 break;
1513
1514 case MULT_EXPR:
1515 t = build_complex (type,
1516 const_binop (MINUS_EXPR,
1517 const_binop (MULT_EXPR,
1518 r1, r2, notrunc),
1519 const_binop (MULT_EXPR,
1520 i1, i2, notrunc),
1521 notrunc),
1522 const_binop (PLUS_EXPR,
1523 const_binop (MULT_EXPR,
1524 r1, i2, notrunc),
1525 const_binop (MULT_EXPR,
1526 i1, r2, notrunc),
1527 notrunc));
1528 break;
1529
1530 case RDIV_EXPR:
1531 {
1532 tree magsquared
1533 = const_binop (PLUS_EXPR,
1534 const_binop (MULT_EXPR, r2, r2, notrunc),
1535 const_binop (MULT_EXPR, i2, i2, notrunc),
1536 notrunc);
1537
1538 t = build_complex (type,
1539 const_binop
1540 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1541 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1542 const_binop (PLUS_EXPR,
1543 const_binop (MULT_EXPR, r1, r2,
1544 notrunc),
1545 const_binop (MULT_EXPR, i1, i2,
1546 notrunc),
1547 notrunc),
1548 magsquared, notrunc),
1549 const_binop
1550 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1551 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1552 const_binop (MINUS_EXPR,
1553 const_binop (MULT_EXPR, i1, r2,
1554 notrunc),
1555 const_binop (MULT_EXPR, r1, i2,
1556 notrunc),
1557 notrunc),
1558 magsquared, notrunc));
1559 }
1560 break;
1561
1562 default:
1563 abort ();
1564 }
1565 return t;
1566 }
1567 return 0;
1568 }
1569
1570 /* These are the hash table functions for the hash table of INTEGER_CST
1571 nodes of a sizetype. */
1572
1573 /* Return the hash code of X, an INTEGER_CST.  */
1574
1575 static hashval_t
1576 size_htab_hash (const void *x)
1577 {
1578 tree t = (tree) x;
1579
1580 return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
1581 ^ htab_hash_pointer (TREE_TYPE (t))
1582 ^ (TREE_OVERFLOW (t) << 20));
1583 }
1584
1585 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1586    is the same as that given by *Y, which is also an INTEGER_CST tree node.  */
1587
1588 static int
1589 size_htab_eq (const void *x, const void *y)
1590 {
1591 tree xt = (tree) x;
1592 tree yt = (tree) y;
1593
1594 return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
1595 && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
1596 && TREE_TYPE (xt) == TREE_TYPE (yt)
1597 && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
1598 }
1599 \f
1600 /* Return an INTEGER_CST whose low-order HOST_BITS_PER_WIDE_INT bits are
1601    given by NUMBER, and whose type is the sizetype represented by KIND.  */
1602
1603 tree
1604 size_int_wide (HOST_WIDE_INT number, enum size_type_kind kind)
1605 {
1606 return size_int_type_wide (number, sizetype_tab[(int) kind]);
1607 }
1608
1609 /* Likewise, but the desired type is specified explicitly. */
1610
1611 static GTY (()) tree new_const;
1612 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
1613 htab_t size_htab;
1614
1615 tree
1616 size_int_type_wide (HOST_WIDE_INT number, tree type)
1617 {
1618 void **slot;
1619
1620 if (size_htab == 0)
1621 {
1622 size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
1623 new_const = make_node (INTEGER_CST);
1624 }
1625
1626 /* Adjust NEW_CONST to be the constant we want. If it's already in the
1627 hash table, we return the value from the hash table. Otherwise, we
1628 place that in the hash table and make a new node for the next time. */
1629 TREE_INT_CST_LOW (new_const) = number;
1630 TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
1631 TREE_TYPE (new_const) = type;
1632 TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
1633 = force_fit_type (new_const, 0);
1634
1635 slot = htab_find_slot (size_htab, new_const, INSERT);
1636 if (*slot == 0)
1637 {
1638 tree t = new_const;
1639
1640 *slot = new_const;
1641 new_const = make_node (INTEGER_CST);
1642 return t;
1643 }
1644 else
1645 return (tree) *slot;
1646 }
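/* Editor's note, not part of fold-const.c: this is the classic
   scratch-node pattern for hash consing.  NEW_CONST is preallocated,
   filled in, and used directly as the lookup key; only on a miss does
   it become the stored entry and a fresh scratch node get allocated,
   so repeated lookups of already-cached constants allocate nothing.  */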
1647
1648 /* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
1649    is a tree code.  The type of the result is taken from the operands.
1650    Both must be the same integer type, and it must be a sizetype.
1651 If the operands are constant, so is the result. */
1652
1653 tree
1654 size_binop (enum tree_code code, tree arg0, tree arg1)
1655 {
1656 tree type = TREE_TYPE (arg0);
1657
1658 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1659 || type != TREE_TYPE (arg1))
1660 abort ();
1661
1662 /* Handle the special case of two integer constants faster. */
1663 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1664 {
1665 /* And some specific cases even faster than that. */
1666 if (code == PLUS_EXPR && integer_zerop (arg0))
1667 return arg1;
1668 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1669 && integer_zerop (arg1))
1670 return arg0;
1671 else if (code == MULT_EXPR && integer_onep (arg0))
1672 return arg1;
1673
1674 /* Handle general case of two integer constants. */
1675 return int_const_binop (code, arg0, arg1, 0);
1676 }
1677
1678 if (arg0 == error_mark_node || arg1 == error_mark_node)
1679 return error_mark_node;
1680
1681 return fold (build2 (code, type, arg0, arg1));
1682 }
1683
1684 /* Given two values, either both of sizetype or both of bitsizetype,
1685 compute the difference between the two values. Return the value
1686 in signed type corresponding to the type of the operands. */
1687
1688 tree
1689 size_diffop (tree arg0, tree arg1)
1690 {
1691 tree type = TREE_TYPE (arg0);
1692 tree ctype;
1693
1694 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1695 || type != TREE_TYPE (arg1))
1696 abort ();
1697
1698 /* If the type is already signed, just do the simple thing. */
1699 if (!TYPE_UNSIGNED (type))
1700 return size_binop (MINUS_EXPR, arg0, arg1);
1701
1702 ctype = (type == bitsizetype || type == ubitsizetype
1703 ? sbitsizetype : ssizetype);
1704
1705 /* If either operand is not a constant, do the conversions to the signed
1706 type and subtract. The hardware will do the right thing with any
1707 overflow in the subtraction. */
1708 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1709 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1710 fold_convert (ctype, arg1));
1711
1712 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1713 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1714 overflow) and negate (which can't either). Special-case a result
1715 of zero while we're here. */
1716 if (tree_int_cst_equal (arg0, arg1))
1717 return fold_convert (ctype, integer_zero_node);
1718 else if (tree_int_cst_lt (arg1, arg0))
1719 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1720 else
1721 return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
1722 fold_convert (ctype, size_binop (MINUS_EXPR,
1723 arg1, arg0)));
1724 }
1725 \f
1726
1727 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1728 type TYPE. If no simplification can be done return NULL_TREE. */
1729
1730 static tree
1731 fold_convert_const (enum tree_code code, tree type, tree arg1)
1732 {
1733 int overflow = 0;
1734 tree t;
1735
1736 if (TREE_TYPE (arg1) == type)
1737 return arg1;
1738
1739 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1740 {
1741 if (TREE_CODE (arg1) == INTEGER_CST)
1742 {
1743 /* If we would build a constant wider than GCC supports,
1744 leave the conversion unfolded. */
1745 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
1746 return NULL_TREE;
1747
1748 /* If we are trying to make a sizetype for a small integer, use
1749 size_int to pick up cached types to reduce duplicate nodes. */
1750 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1751 && !TREE_CONSTANT_OVERFLOW (arg1)
1752 && compare_tree_int (arg1, 10000) < 0)
1753 return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);
1754
1755 /* Given an integer constant, make new constant with new type,
1756 appropriately sign-extended or truncated. */
1757 t = build_int_2 (TREE_INT_CST_LOW (arg1),
1758 TREE_INT_CST_HIGH (arg1));
1759 TREE_TYPE (t) = type;
1760 /* Indicate an overflow if (1) ARG1 already overflowed,
1761 or (2) force_fit_type indicates an overflow.
1762 Tell force_fit_type that an overflow has already occurred
1763 if ARG1 is a too-large unsigned value and T is signed.
1764 But don't indicate an overflow if converting a pointer. */
1765 TREE_OVERFLOW (t)
1766 = ((force_fit_type (t,
1767 (TREE_INT_CST_HIGH (arg1) < 0
1768 && (TYPE_UNSIGNED (type)
1769 < TYPE_UNSIGNED (TREE_TYPE (arg1)))))
1770 && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
1771 || TREE_OVERFLOW (arg1));
1772 TREE_CONSTANT_OVERFLOW (t)
1773 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1774 return t;
1775 }
1776 else if (TREE_CODE (arg1) == REAL_CST)
1777 {
1778 /* The following code implements the floating point to integer
1779 conversion rules required by the Java Language Specification,
1780 that IEEE NaNs are mapped to zero and values that overflow
1781 the target precision saturate, i.e. values greater than
1782 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1783 are mapped to INT_MIN. These semantics are allowed by the
1784 C and C++ standards that simply state that the behavior of
1785 FP-to-integer conversion is unspecified upon overflow. */
1786
1787 HOST_WIDE_INT high, low;
1788
1789 REAL_VALUE_TYPE r;
1790 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1791
1792 switch (code)
1793 {
1794 case FIX_TRUNC_EXPR:
1795 real_trunc (&r, VOIDmode, &x);
1796 break;
1797
1798 case FIX_CEIL_EXPR:
1799 real_ceil (&r, VOIDmode, &x);
1800 break;
1801
1802 case FIX_FLOOR_EXPR:
1803 real_floor (&r, VOIDmode, &x);
1804 break;
1805
1806 case FIX_ROUND_EXPR:
1807 real_round (&r, VOIDmode, &x);
1808 break;
1809
1810 default:
1811 abort ();
1812 }
1813
1814 /* If R is NaN, return zero and show we have an overflow. */
1815 if (REAL_VALUE_ISNAN (r))
1816 {
1817 overflow = 1;
1818 high = 0;
1819 low = 0;
1820 }
1821
1822 /* See if R is less than the lower bound or greater than the
1823 upper bound. */
1824
1825 if (! overflow)
1826 {
1827 tree lt = TYPE_MIN_VALUE (type);
1828 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1829 if (REAL_VALUES_LESS (r, l))
1830 {
1831 overflow = 1;
1832 high = TREE_INT_CST_HIGH (lt);
1833 low = TREE_INT_CST_LOW (lt);
1834 }
1835 }
1836
1837 if (! overflow)
1838 {
1839 tree ut = TYPE_MAX_VALUE (type);
1840 if (ut)
1841 {
1842 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1843 if (REAL_VALUES_LESS (u, r))
1844 {
1845 overflow = 1;
1846 high = TREE_INT_CST_HIGH (ut);
1847 low = TREE_INT_CST_LOW (ut);
1848 }
1849 }
1850 }
1851
1852 if (! overflow)
1853 REAL_VALUE_TO_INT (&low, &high, r);
1854
1855 t = build_int_2 (low, high);
1856 TREE_TYPE (t) = type;
1857 TREE_OVERFLOW (t)
1858 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1859 TREE_CONSTANT_OVERFLOW (t)
1860 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1861 return t;
1862 }
1863 }
1864 else if (TREE_CODE (type) == REAL_TYPE)
1865 {
1866 if (TREE_CODE (arg1) == INTEGER_CST)
1867 return build_real_from_int_cst (type, arg1);
1868 if (TREE_CODE (arg1) == REAL_CST)
1869 {
1870 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1871 {
1872 /* We make a copy of ARG1 so that we don't modify an
1873 existing constant tree. */
1874 t = copy_node (arg1);
1875 TREE_TYPE (t) = type;
1876 return t;
1877 }
1878
1879 t = build_real (type,
1880 real_value_truncate (TYPE_MODE (type),
1881 TREE_REAL_CST (arg1)));
1882
1883 TREE_OVERFLOW (t)
1884 = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
1885 TREE_CONSTANT_OVERFLOW (t)
1886 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1887 return t;
1888 }
1889 }
1890 return NULL_TREE;
1891 }
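
/* Worked examples of the FP-to-integer folding above (editorial sketch,
   assuming a 32-bit signed target int):

     (int) __builtin_nan ("")  folds to 0        with TREE_OVERFLOW set
     (int) 1.0e30              folds to INT_MAX  with TREE_OVERFLOW set
     (int) -1.0e30             folds to INT_MIN  with TREE_OVERFLOW set
     (int) 2.5                 folds to 2        (FIX_TRUNC_EXPR)  */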
1892
1893 /* Convert expression ARG to type TYPE. Used by the middle-end for
1894 simple conversions in preference to calling the front-end's convert. */
1895
1896 tree
1897 fold_convert (tree type, tree arg)
1898 {
1899 tree orig = TREE_TYPE (arg);
1900 tree tem;
1901
1902 if (type == orig)
1903 return arg;
1904
1905 if (TREE_CODE (arg) == ERROR_MARK
1906 || TREE_CODE (type) == ERROR_MARK
1907 || TREE_CODE (orig) == ERROR_MARK)
1908 return error_mark_node;
1909
1910 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1911 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1912 TYPE_MAIN_VARIANT (orig)))
1913 return fold (build1 (NOP_EXPR, type, arg));
1914
1915 if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type)
1916 || TREE_CODE (type) == OFFSET_TYPE)
1917 {
1918 if (TREE_CODE (arg) == INTEGER_CST)
1919 {
1920 tem = fold_convert_const (NOP_EXPR, type, arg);
1921 if (tem != NULL_TREE)
1922 return tem;
1923 }
1924 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1925 || TREE_CODE (orig) == OFFSET_TYPE)
1926 return fold (build1 (NOP_EXPR, type, arg));
1927 if (TREE_CODE (orig) == COMPLEX_TYPE)
1928 {
1929 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1930 return fold_convert (type, tem);
1931 }
1932 if (TREE_CODE (orig) == VECTOR_TYPE
1933 && GET_MODE_SIZE (TYPE_MODE (type))
1934 == GET_MODE_SIZE (TYPE_MODE (orig)))
1935 return fold (build1 (NOP_EXPR, type, arg));
1936 }
1937 else if (TREE_CODE (type) == REAL_TYPE)
1938 {
1939 if (TREE_CODE (arg) == INTEGER_CST)
1940 {
1941 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1942 if (tem != NULL_TREE)
1943 return tem;
1944 }
1945 else if (TREE_CODE (arg) == REAL_CST)
1946 {
1947 tem = fold_convert_const (NOP_EXPR, type, arg);
1948 if (tem != NULL_TREE)
1949 return tem;
1950 }
1951
1952 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1953 return fold (build1 (FLOAT_EXPR, type, arg));
1954 if (TREE_CODE (orig) == REAL_TYPE)
1955 return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1956 type, arg));
1957 if (TREE_CODE (orig) == COMPLEX_TYPE)
1958 {
1959 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1960 return fold_convert (type, tem);
1961 }
1962 }
1963 else if (TREE_CODE (type) == COMPLEX_TYPE)
1964 {
1965 if (INTEGRAL_TYPE_P (orig)
1966 || POINTER_TYPE_P (orig)
1967 || TREE_CODE (orig) == REAL_TYPE)
1968 return build2 (COMPLEX_EXPR, type,
1969 fold_convert (TREE_TYPE (type), arg),
1970 fold_convert (TREE_TYPE (type), integer_zero_node));
1971 if (TREE_CODE (orig) == COMPLEX_TYPE)
1972 {
1973 tree rpart, ipart;
1974
1975 if (TREE_CODE (arg) == COMPLEX_EXPR)
1976 {
1977 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
1978 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
1979 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
1980 }
1981
1982 arg = save_expr (arg);
1983 rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1984 ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
1985 rpart = fold_convert (TREE_TYPE (type), rpart);
1986 ipart = fold_convert (TREE_TYPE (type), ipart);
1987 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
1988 }
1989 }
1990 else if (TREE_CODE (type) == VECTOR_TYPE)
1991 {
1992 if ((INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1993 && GET_MODE_SIZE (TYPE_MODE (type))
1994 == GET_MODE_SIZE (TYPE_MODE (orig)))
1995 return fold (build1 (NOP_EXPR, type, arg));
1996 if (TREE_CODE (orig) == VECTOR_TYPE
1997 && GET_MODE_SIZE (TYPE_MODE (type))
1998 == GET_MODE_SIZE (TYPE_MODE (orig)))
1999 return fold (build1 (NOP_EXPR, type, arg));
2000 }
2001 else if (VOID_TYPE_P (type))
2002 return fold (build1 (CONVERT_EXPR, type, fold_ignored_result (arg)));
2003 abort ();
2004 }
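
/* For example (editorial sketch): with a COMPLEX_TYPE source and a
   REAL_TYPE target, fold_convert extracts the REALPART_EXPR and
   converts that; with a REAL_TYPE or integral source and a
   COMPLEX_TYPE target it builds COMPLEX_EXPR <arg', 0>.  */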
2005 \f
2006 /* Return an expr equal to X but certainly not valid as an lvalue. */
2007
2008 tree
2009 non_lvalue (tree x)
2010 {
2011 /* We only need to wrap lvalue tree codes. */
2012 switch (TREE_CODE (x))
2013 {
2014 case VAR_DECL:
2015 case PARM_DECL:
2016 case RESULT_DECL:
2017 case LABEL_DECL:
2018 case FUNCTION_DECL:
2019 case SSA_NAME:
2020
2021 case COMPONENT_REF:
2022 case INDIRECT_REF:
2023 case ARRAY_REF:
2024 case ARRAY_RANGE_REF:
2025 case BIT_FIELD_REF:
2026 case OBJ_TYPE_REF:
2027
2028 case REALPART_EXPR:
2029 case IMAGPART_EXPR:
2030 case PREINCREMENT_EXPR:
2031 case PREDECREMENT_EXPR:
2032 case SAVE_EXPR:
2033 case UNSAVE_EXPR:
2034 case TRY_CATCH_EXPR:
2035 case WITH_CLEANUP_EXPR:
2036 case COMPOUND_EXPR:
2037 case MODIFY_EXPR:
2038 case TARGET_EXPR:
2039 case COND_EXPR:
2040 case BIND_EXPR:
2041 case MIN_EXPR:
2042 case MAX_EXPR:
2043 break;
2044
2045 default:
2046 /* Assume the worst for front-end tree codes. */
2047 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2048 break;
2049 return x;
2050 }
2051 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2052 }
2053
2054 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2055 Zero means allow extended lvalues. */
2056
2057 int pedantic_lvalues;
2058
2059 /* When pedantic, return an expr equal to X but certainly not valid as a
2060 pedantic lvalue. Otherwise, return X. */
2061
2062 tree
2063 pedantic_non_lvalue (tree x)
2064 {
2065 if (pedantic_lvalues)
2066 return non_lvalue (x);
2067 else
2068 return x;
2069 }
2070 \f
2071 /* Given a tree comparison code, return the code that is the logical inverse
2072 of the given code. It is not safe to do this for floating-point
2073 comparisons, except for NE_EXPR and EQ_EXPR, so we also receive HONOR_NANS:
2074 if reversing the comparison is unsafe, return ERROR_MARK.  */
2075
2076 static enum tree_code
2077 invert_tree_comparison (enum tree_code code, bool honor_nans)
2078 {
2079 if (honor_nans && flag_trapping_math)
2080 return ERROR_MARK;
2081
2082 switch (code)
2083 {
2084 case EQ_EXPR:
2085 return NE_EXPR;
2086 case NE_EXPR:
2087 return EQ_EXPR;
2088 case GT_EXPR:
2089 return honor_nans ? UNLE_EXPR : LE_EXPR;
2090 case GE_EXPR:
2091 return honor_nans ? UNLT_EXPR : LT_EXPR;
2092 case LT_EXPR:
2093 return honor_nans ? UNGE_EXPR : GE_EXPR;
2094 case LE_EXPR:
2095 return honor_nans ? UNGT_EXPR : GT_EXPR;
2096 case LTGT_EXPR:
2097 return UNEQ_EXPR;
2098 case UNEQ_EXPR:
2099 return LTGT_EXPR;
2100 case UNGT_EXPR:
2101 return LE_EXPR;
2102 case UNGE_EXPR:
2103 return LT_EXPR;
2104 case UNLT_EXPR:
2105 return GE_EXPR;
2106 case UNLE_EXPR:
2107 return GT_EXPR;
2108 case ORDERED_EXPR:
2109 return UNORDERED_EXPR;
2110 case UNORDERED_EXPR:
2111 return ORDERED_EXPR;
2112 default:
2113 abort ();
2114 }
2115 }
2116
2117 /* Similar, but return the comparison that results if the operands are
2118 swapped. This is safe for floating-point. */
2119
2120 enum tree_code
2121 swap_tree_comparison (enum tree_code code)
2122 {
2123 switch (code)
2124 {
2125 case EQ_EXPR:
2126 case NE_EXPR:
2127 return code;
2128 case GT_EXPR:
2129 return LT_EXPR;
2130 case GE_EXPR:
2131 return LE_EXPR;
2132 case LT_EXPR:
2133 return GT_EXPR;
2134 case LE_EXPR:
2135 return GE_EXPR;
2136 default:
2137 abort ();
2138 }
2139 }
2140
2141
2142 /* Convert a comparison tree code from an enum tree_code representation
2143 into a compcode bit-based encoding. This function is the inverse of
2144 compcode_to_comparison. */
2145
2146 static enum comparison_code
2147 comparison_to_compcode (enum tree_code code)
2148 {
2149 switch (code)
2150 {
2151 case LT_EXPR:
2152 return COMPCODE_LT;
2153 case EQ_EXPR:
2154 return COMPCODE_EQ;
2155 case LE_EXPR:
2156 return COMPCODE_LE;
2157 case GT_EXPR:
2158 return COMPCODE_GT;
2159 case NE_EXPR:
2160 return COMPCODE_NE;
2161 case GE_EXPR:
2162 return COMPCODE_GE;
2163 case ORDERED_EXPR:
2164 return COMPCODE_ORD;
2165 case UNORDERED_EXPR:
2166 return COMPCODE_UNORD;
2167 case UNLT_EXPR:
2168 return COMPCODE_UNLT;
2169 case UNEQ_EXPR:
2170 return COMPCODE_UNEQ;
2171 case UNLE_EXPR:
2172 return COMPCODE_UNLE;
2173 case UNGT_EXPR:
2174 return COMPCODE_UNGT;
2175 case LTGT_EXPR:
2176 return COMPCODE_LTGT;
2177 case UNGE_EXPR:
2178 return COMPCODE_UNGE;
2179 default:
2180 abort ();
2181 }
2182 }
2183
2184 /* Convert a compcode bit-based encoding of a comparison operator back
2185 to GCC's enum tree_code representation. This function is the
2186 inverse of comparison_to_compcode. */
2187
2188 static enum tree_code
2189 compcode_to_comparison (enum comparison_code code)
2190 {
2191 switch (code)
2192 {
2193 case COMPCODE_LT:
2194 return LT_EXPR;
2195 case COMPCODE_EQ:
2196 return EQ_EXPR;
2197 case COMPCODE_LE:
2198 return LE_EXPR;
2199 case COMPCODE_GT:
2200 return GT_EXPR;
2201 case COMPCODE_NE:
2202 return NE_EXPR;
2203 case COMPCODE_GE:
2204 return GE_EXPR;
2205 case COMPCODE_ORD:
2206 return ORDERED_EXPR;
2207 case COMPCODE_UNORD:
2208 return UNORDERED_EXPR;
2209 case COMPCODE_UNLT:
2210 return UNLT_EXPR;
2211 case COMPCODE_UNEQ:
2212 return UNEQ_EXPR;
2213 case COMPCODE_UNLE:
2214 return UNLE_EXPR;
2215 case COMPCODE_UNGT:
2216 return UNGT_EXPR;
2217 case COMPCODE_LTGT:
2218 return LTGT_EXPR;
2219 case COMPCODE_UNGE:
2220 return UNGE_EXPR;
2221 default:
2222 abort ();
2223 }
2224 }
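
/* A worked check of the encoding (editorial sketch):

     COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE    (1 | 2 == 3)
     COMPCODE_LT | COMPCODE_GT == COMPCODE_LTGT  (1 | 4 == 5)
     COMPCODE_LE & COMPCODE_GE == COMPCODE_EQ    (3 & 6 == 2)

   so ANDing or ORing two compcodes yields the compcode of the
   conjunction or disjunction of the original comparisons.  */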
2225
2226 /* Return a tree for the comparison which is the combination of
2227 doing the AND or OR (depending on CODE) of the two operations LCODE
2228 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2229 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2230 if this makes the transformation invalid. */
2231
2232 tree
2233 combine_comparisons (enum tree_code code, enum tree_code lcode,
2234 enum tree_code rcode, tree truth_type,
2235 tree ll_arg, tree lr_arg)
2236 {
2237 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2238 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2239 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2240 enum comparison_code compcode;
2241
2242 switch (code)
2243 {
2244 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2245 compcode = lcompcode & rcompcode;
2246 break;
2247
2248 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2249 compcode = lcompcode | rcompcode;
2250 break;
2251
2252 default:
2253 return NULL_TREE;
2254 }
2255
2256 if (!honor_nans)
2257 {
2258 /* Eliminate unordered comparisons, as well as LTGT and ORD
2259 which are not used unless the mode has NaNs. */
2260 compcode &= ~COMPCODE_UNORD;
2261 if (compcode == COMPCODE_LTGT)
2262 compcode = COMPCODE_NE;
2263 else if (compcode == COMPCODE_ORD)
2264 compcode = COMPCODE_TRUE;
2265 }
2266 else if (flag_trapping_math)
2267 {
2268 /* Check that the original operation and the optimized ones will trap
2269 under the same condition. */
2270 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2271 && (lcompcode != COMPCODE_EQ)
2272 && (lcompcode != COMPCODE_ORD);
2273 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2274 && (rcompcode != COMPCODE_EQ)
2275 && (rcompcode != COMPCODE_ORD);
2276 bool trap = (compcode & COMPCODE_UNORD) == 0
2277 && (compcode != COMPCODE_EQ)
2278 && (compcode != COMPCODE_ORD);
2279
2280 /* In a short-circuited boolean expression the LHS might be
2281 such that the RHS, if evaluated, will never trap. For
2282 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2283 if neither x nor y is NaN. (This is a mixed blessing: for
2284 example, the expression above will never trap, hence
2285 optimizing it to x < y would be invalid). */
2286 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2287 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2288 rtrap = false;
2289
2290 /* If the comparison was short-circuited, and only the RHS
2291 trapped, we may now generate a spurious trap. */
2292 if (rtrap && !ltrap
2293 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2294 return NULL_TREE;
2295
2296 /* If we changed the conditions that cause a trap, we lose. */
2297 if ((ltrap || rtrap) != trap)
2298 return NULL_TREE;
2299 }
2300
2301 if (compcode == COMPCODE_TRUE)
2302 return constant_boolean_node (true, truth_type);
2303 else if (compcode == COMPCODE_FALSE)
2304 return constant_boolean_node (false, truth_type);
2305 else
2306 return fold (build2 (compcode_to_comparison (compcode),
2307 truth_type, ll_arg, lr_arg));
2308 }
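
/* For example (editorial sketch), for operands whose mode has no NaNs,
   "a < b || a > b" gives COMPCODE_LT | COMPCODE_GT == COMPCODE_LTGT,
   which the !honor_nans path rewrites to COMPCODE_NE, so the whole
   disjunction folds to "a != b"; "a < b && a > b" gives
   COMPCODE_LT & COMPCODE_GT == COMPCODE_FALSE, i.e. constant false.  */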
2309
2310 /* Return nonzero if CODE is a tree code that represents a truth value. */
2311
2312 static int
2313 truth_value_p (enum tree_code code)
2314 {
2315 return (TREE_CODE_CLASS (code) == '<'
2316 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2317 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2318 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2319 }
2320 \f
2321 /* Return nonzero if two operands (typically of the same tree node)
2322 are necessarily equal. If either argument has side-effects this
2323 function returns zero. FLAGS modifies behavior as follows:
2324
2325 If OEP_ONLY_CONST is set, only return nonzero for constants.
2326 This function tests whether the operands are indistinguishable;
2327 it does not test whether they are equal using C's == operation.
2328 The distinction is important for IEEE floating point, because
2329 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2330 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2331
2332 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2333 even though it may hold multiple values during a function.
2334 This is because a GCC tree node guarantees that nothing else is
2335 executed between the evaluation of its "operands" (which may often
2336 be evaluated in arbitrary order). Hence if the operands themselves
2337 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2338 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2339 unset means assuming isochronic (or instantaneous) tree equivalence.
2340 Unless comparing arbitrary expression trees, such as from different
2341 statements, this flag can usually be left unset.
2342
2343 If OEP_PURE_SAME is set, then pure functions with identical arguments
2344 are considered the same. It is used when the caller has other ways
2345 to ensure that global memory is unchanged in between. */
2346
2347 int
2348 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2349 {
2350 /* If either is ERROR_MARK, they aren't equal. */
2351 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2352 return 0;
2353
2354 /* If both types don't have the same signedness, then we can't consider
2355 them equal. We must check this before the STRIP_NOPS calls
2356 because they may change the signedness of the arguments. */
2357 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2358 return 0;
2359
2360 STRIP_NOPS (arg0);
2361 STRIP_NOPS (arg1);
2362
2363 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2364 /* This is needed for conversions and for COMPONENT_REF.
2365 Might as well play it safe and always test this. */
2366 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2367 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2368 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2369 return 0;
2370
2371 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2372 We don't care about side effects in that case because the SAVE_EXPR
2373 takes care of that for us. In all other cases, two expressions are
2374 equal if they have no side effects. If we have two identical
2375 expressions with side effects that should be treated the same due
2376 to the only side effects being identical SAVE_EXPR's, that will
2377 be detected in the recursive calls below. */
2378 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2379 && (TREE_CODE (arg0) == SAVE_EXPR
2380 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2381 return 1;
2382
2383 /* Next handle constant cases, those for which we can return 1 even
2384 if ONLY_CONST is set. */
2385 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2386 switch (TREE_CODE (arg0))
2387 {
2388 case INTEGER_CST:
2389 return (! TREE_CONSTANT_OVERFLOW (arg0)
2390 && ! TREE_CONSTANT_OVERFLOW (arg1)
2391 && tree_int_cst_equal (arg0, arg1));
2392
2393 case REAL_CST:
2394 return (! TREE_CONSTANT_OVERFLOW (arg0)
2395 && ! TREE_CONSTANT_OVERFLOW (arg1)
2396 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2397 TREE_REAL_CST (arg1)));
2398
2399 case VECTOR_CST:
2400 {
2401 tree v1, v2;
2402
2403 if (TREE_CONSTANT_OVERFLOW (arg0)
2404 || TREE_CONSTANT_OVERFLOW (arg1))
2405 return 0;
2406
2407 v1 = TREE_VECTOR_CST_ELTS (arg0);
2408 v2 = TREE_VECTOR_CST_ELTS (arg1);
2409 while (v1 && v2)
2410 {
2411 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2412 flags))
2413 return 0;
2414 v1 = TREE_CHAIN (v1);
2415 v2 = TREE_CHAIN (v2);
2416 }
2417
2418 return 1;
2419 }
2420
2421 case COMPLEX_CST:
2422 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2423 flags)
2424 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2425 flags));
2426
2427 case STRING_CST:
2428 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2429 && ! memcmp (TREE_STRING_POINTER (arg0),
2430 TREE_STRING_POINTER (arg1),
2431 TREE_STRING_LENGTH (arg0)));
2432
2433 case ADDR_EXPR:
2434 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2435 0);
2436 default:
2437 break;
2438 }
2439
2440 if (flags & OEP_ONLY_CONST)
2441 return 0;
2442
2443 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2444 {
2445 case '1':
2446 /* Two conversions are equal only if signedness and modes match. */
2447 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
2448 && (TYPE_UNSIGNED (TREE_TYPE (arg0))
2449 != TYPE_UNSIGNED (TREE_TYPE (arg1))))
2450 return 0;
2451
2452 return operand_equal_p (TREE_OPERAND (arg0, 0),
2453 TREE_OPERAND (arg1, 0), flags);
2454
2455 case '<':
2456 case '2':
2457 if (operand_equal_p (TREE_OPERAND (arg0, 0),
2458 TREE_OPERAND (arg1, 0), flags)
2459 && operand_equal_p (TREE_OPERAND (arg0, 1),
2460 TREE_OPERAND (arg1, 1), flags))
2461 return 1;
2462
2463 /* For commutative ops, allow the other order. */
2464 return (commutative_tree_code (TREE_CODE (arg0))
2465 && operand_equal_p (TREE_OPERAND (arg0, 0),
2466 TREE_OPERAND (arg1, 1), flags)
2467 && operand_equal_p (TREE_OPERAND (arg0, 1),
2468 TREE_OPERAND (arg1, 0), flags));
2469
2470 case 'r':
2471 /* If either of the pointer (or reference) expressions we are
2472 dereferencing contain a side effect, these cannot be equal. */
2473 if (TREE_SIDE_EFFECTS (arg0)
2474 || TREE_SIDE_EFFECTS (arg1))
2475 return 0;
2476
2477 switch (TREE_CODE (arg0))
2478 {
2479 case INDIRECT_REF:
2480 case REALPART_EXPR:
2481 case IMAGPART_EXPR:
2482 return operand_equal_p (TREE_OPERAND (arg0, 0),
2483 TREE_OPERAND (arg1, 0), flags);
2484
2485 case COMPONENT_REF:
2486 case ARRAY_REF:
2487 case ARRAY_RANGE_REF:
2488 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2489 TREE_OPERAND (arg1, 0), flags)
2490 && operand_equal_p (TREE_OPERAND (arg0, 1),
2491 TREE_OPERAND (arg1, 1), flags));
2492
2493 case BIT_FIELD_REF:
2494 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2495 TREE_OPERAND (arg1, 0), flags)
2496 && operand_equal_p (TREE_OPERAND (arg0, 1),
2497 TREE_OPERAND (arg1, 1), flags)
2498 && operand_equal_p (TREE_OPERAND (arg0, 2),
2499 TREE_OPERAND (arg1, 2), flags));
2500 default:
2501 return 0;
2502 }
2503
2504 case 'e':
2505 switch (TREE_CODE (arg0))
2506 {
2507 case ADDR_EXPR:
2508 case TRUTH_NOT_EXPR:
2509 return operand_equal_p (TREE_OPERAND (arg0, 0),
2510 TREE_OPERAND (arg1, 0), flags);
2511
2512 case TRUTH_ANDIF_EXPR:
2513 case TRUTH_ORIF_EXPR:
2514 return operand_equal_p (TREE_OPERAND (arg0, 0),
2515 TREE_OPERAND (arg1, 0), flags)
2516 && operand_equal_p (TREE_OPERAND (arg0, 1),
2517 TREE_OPERAND (arg1, 1), flags);
2518
2519 case TRUTH_AND_EXPR:
2520 case TRUTH_OR_EXPR:
2521 case TRUTH_XOR_EXPR:
2522 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2523 TREE_OPERAND (arg1, 0), flags)
2524 && operand_equal_p (TREE_OPERAND (arg0, 1),
2525 TREE_OPERAND (arg1, 1), flags))
2526 || (operand_equal_p (TREE_OPERAND (arg0, 0),
2527 TREE_OPERAND (arg1, 1), flags)
2528 && operand_equal_p (TREE_OPERAND (arg0, 1),
2529 TREE_OPERAND (arg1, 0), flags));
2530
2531 case CALL_EXPR:
2532 /* If the CALL_EXPRs call different functions, then they
2533 clearly cannot be equal.  */
2534 if (! operand_equal_p (TREE_OPERAND (arg0, 0),
2535 TREE_OPERAND (arg1, 0), flags))
2536 return 0;
2537
2538 {
2539 unsigned int cef = call_expr_flags (arg0);
2540 if (flags & OEP_PURE_SAME)
2541 cef &= ECF_CONST | ECF_PURE;
2542 else
2543 cef &= ECF_CONST;
2544 if (!cef)
2545 return 0;
2546 }
2547
2548 /* Now see if all the arguments are the same. operand_equal_p
2549 does not handle TREE_LIST, so we walk the operands here
2550 feeding them to operand_equal_p. */
2551 arg0 = TREE_OPERAND (arg0, 1);
2552 arg1 = TREE_OPERAND (arg1, 1);
2553 while (arg0 && arg1)
2554 {
2555 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2556 flags))
2557 return 0;
2558
2559 arg0 = TREE_CHAIN (arg0);
2560 arg1 = TREE_CHAIN (arg1);
2561 }
2562
2563 /* If we get here and both argument lists are exhausted
2564 then the CALL_EXPRs are equal. */
2565 return ! (arg0 || arg1);
2566
2567 default:
2568 return 0;
2569 }
2570
2571 case 'd':
2572 /* Consider __builtin_sqrt equal to sqrt. */
2573 return (TREE_CODE (arg0) == FUNCTION_DECL
2574 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2575 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2576 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2577
2578 default:
2579 return 0;
2580 }
2581 }
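
/* Illustrative calls (editorial sketch):

     operand_equal_p (a + b, b + a, 0)  returns 1, since PLUS_EXPR is
       commutative and the swapped order is tried in case '2';
     operand_equal_p on the REAL_CSTs -0.0 and 0.0 returns 0, since
       REAL_VALUES_IDENTICAL distinguishes signed zeros;
     two identical CALL_EXPRs compare equal only if the callee is
       ECF_CONST, or ECF_PURE when OEP_PURE_SAME is set.  */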
2582 \f
2583 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2584 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2585
2586 When in doubt, return 0. */
2587
2588 static int
2589 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2590 {
2591 int unsignedp1, unsignedpo;
2592 tree primarg0, primarg1, primother;
2593 unsigned int correct_width;
2594
2595 if (operand_equal_p (arg0, arg1, 0))
2596 return 1;
2597
2598 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2599 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2600 return 0;
2601
2602 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2603 and see if the inner values are the same. This removes any
2604 signedness comparison, which doesn't matter here. */
2605 primarg0 = arg0, primarg1 = arg1;
2606 STRIP_NOPS (primarg0);
2607 STRIP_NOPS (primarg1);
2608 if (operand_equal_p (primarg0, primarg1, 0))
2609 return 1;
2610
2611 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2612 actual comparison operand, ARG0.
2613
2614 First throw away any conversions to wider types
2615 already present in the operands. */
2616
2617 primarg1 = get_narrower (arg1, &unsignedp1);
2618 primother = get_narrower (other, &unsignedpo);
2619
2620 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2621 if (unsignedp1 == unsignedpo
2622 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2623 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2624 {
2625 tree type = TREE_TYPE (arg0);
2626
2627 /* Make sure shorter operand is extended the right way
2628 to match the longer operand. */
2629 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2630 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2631
2632 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2633 return 1;
2634 }
2635
2636 return 0;
2637 }
2638 \f
2639 /* See if ARG is an expression that is either a comparison or is performing
2640 arithmetic on comparisons. The comparisons must only be comparing
2641 two different values, which will be stored in *CVAL1 and *CVAL2; if
2642 they are nonzero it means that some operands have already been found.
2643 No variables may be used anywhere else in the expression except in the
2644 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2645 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2646
2647 If this is true, return 1. Otherwise, return zero. */
2648
2649 static int
2650 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2651 {
2652 enum tree_code code = TREE_CODE (arg);
2653 char class = TREE_CODE_CLASS (code);
2654
2655 /* We can handle some of the 'e' cases here. */
2656 if (class == 'e' && code == TRUTH_NOT_EXPR)
2657 class = '1';
2658 else if (class == 'e'
2659 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2660 || code == COMPOUND_EXPR))
2661 class = '2';
2662
2663 else if (class == 'e' && code == SAVE_EXPR
2664 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2665 {
2666 /* If we've already found a CVAL1 or CVAL2, this expression is
2667 too complex to handle.  */
2668 if (*cval1 || *cval2)
2669 return 0;
2670
2671 class = '1';
2672 *save_p = 1;
2673 }
2674
2675 switch (class)
2676 {
2677 case '1':
2678 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2679
2680 case '2':
2681 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2682 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2683 cval1, cval2, save_p));
2684
2685 case 'c':
2686 return 1;
2687
2688 case 'e':
2689 if (code == COND_EXPR)
2690 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2691 cval1, cval2, save_p)
2692 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2693 cval1, cval2, save_p)
2694 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2695 cval1, cval2, save_p));
2696 return 0;
2697
2698 case '<':
2699 /* First see if we can handle the first operand, then the second. For
2700 the second operand, we know *CVAL1 can't be zero. It must be that
2701 one side of the comparison is each of the values; test for the
2702 case where this isn't true by failing if the two operands
2703 are the same. */
2704
2705 if (operand_equal_p (TREE_OPERAND (arg, 0),
2706 TREE_OPERAND (arg, 1), 0))
2707 return 0;
2708
2709 if (*cval1 == 0)
2710 *cval1 = TREE_OPERAND (arg, 0);
2711 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2712 ;
2713 else if (*cval2 == 0)
2714 *cval2 = TREE_OPERAND (arg, 0);
2715 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2716 ;
2717 else
2718 return 0;
2719
2720 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2721 ;
2722 else if (*cval2 == 0)
2723 *cval2 = TREE_OPERAND (arg, 1);
2724 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2725 ;
2726 else
2727 return 0;
2728
2729 return 1;
2730
2731 default:
2732 return 0;
2733 }
2734 }
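
/* For example (editorial sketch): for ARG == "(a < b) || (a == b)" we
   record *CVAL1 == a and *CVAL2 == b and return 1; for
   "(a < b) || (c == d)" we return 0, since the comparisons mention
   more than two values.  */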
2735 \f
2736 /* ARG is a tree that is known to contain just arithmetic operations and
2737 comparisons. Evaluate the operations in the tree substituting NEW0 for
2738 any occurrence of OLD0 as an operand of a comparison and likewise for
2739 NEW1 and OLD1. */
2740
2741 static tree
2742 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2743 {
2744 tree type = TREE_TYPE (arg);
2745 enum tree_code code = TREE_CODE (arg);
2746 char class = TREE_CODE_CLASS (code);
2747
2748 /* We can handle some of the 'e' cases here. */
2749 if (class == 'e' && code == TRUTH_NOT_EXPR)
2750 class = '1';
2751 else if (class == 'e'
2752 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2753 class = '2';
2754
2755 switch (class)
2756 {
2757 case '1':
2758 return fold (build1 (code, type,
2759 eval_subst (TREE_OPERAND (arg, 0),
2760 old0, new0, old1, new1)));
2761
2762 case '2':
2763 return fold (build2 (code, type,
2764 eval_subst (TREE_OPERAND (arg, 0),
2765 old0, new0, old1, new1),
2766 eval_subst (TREE_OPERAND (arg, 1),
2767 old0, new0, old1, new1)));
2768
2769 case 'e':
2770 switch (code)
2771 {
2772 case SAVE_EXPR:
2773 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2774
2775 case COMPOUND_EXPR:
2776 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2777
2778 case COND_EXPR:
2779 return fold (build3 (code, type,
2780 eval_subst (TREE_OPERAND (arg, 0),
2781 old0, new0, old1, new1),
2782 eval_subst (TREE_OPERAND (arg, 1),
2783 old0, new0, old1, new1),
2784 eval_subst (TREE_OPERAND (arg, 2),
2785 old0, new0, old1, new1)));
2786 default:
2787 break;
2788 }
2789 /* Fall through - ??? */
2790
2791 case '<':
2792 {
2793 tree arg0 = TREE_OPERAND (arg, 0);
2794 tree arg1 = TREE_OPERAND (arg, 1);
2795
2796 /* We need to check both for exact equality and tree equality. The
2797 former will be true if the operand has a side-effect. In that
2798 case, we know the operand occurred exactly once. */
2799
2800 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2801 arg0 = new0;
2802 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2803 arg0 = new1;
2804
2805 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2806 arg1 = new0;
2807 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2808 arg1 = new1;
2809
2810 return fold (build2 (code, type, arg0, arg1));
2811 }
2812
2813 default:
2814 return arg;
2815 }
2816 }
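
/* For example (editorial sketch): eval_subst on "(a < b) && (a == b)"
   with OLD0 == a, NEW0 == x, OLD1 == b, NEW1 == y rebuilds the tree as
   "(x < y) && (x == y)", folding each rebuilt node as it goes.  */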
2817 \f
2818 /* Return a tree for the case when the result of an expression is RESULT
2819 converted to TYPE and OMITTED was previously an operand of the expression
2820 but is now not needed (e.g., we folded OMITTED * 0).
2821
2822 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2823 the conversion of RESULT to TYPE. */
2824
2825 tree
2826 omit_one_operand (tree type, tree result, tree omitted)
2827 {
2828 tree t = fold_convert (type, result);
2829
2830 if (TREE_SIDE_EFFECTS (omitted))
2831 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2832
2833 return non_lvalue (t);
2834 }
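
/* For example (editorial sketch): when fold simplifies "f () * 0" it
   calls omit_one_operand (type, zero, call); since the call has side
   effects, the result is the COMPOUND_EXPR "(f (), 0)".  Without side
   effects the result is just the converted constant, wrapped so it is
   not an lvalue.  */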
2835
2836 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2837
2838 static tree
2839 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2840 {
2841 tree t = fold_convert (type, result);
2842
2843 if (TREE_SIDE_EFFECTS (omitted))
2844 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2845
2846 return pedantic_non_lvalue (t);
2847 }
2848
2849 /* Return a tree for the case when the result of an expression is RESULT
2850 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2851 of the expression but are now not needed.
2852
2853 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2854 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2855 evaluated before OMITTED2. Otherwise, if neither has side effects,
2856 just do the conversion of RESULT to TYPE. */
2857
2858 tree
2859 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2860 {
2861 tree t = fold_convert (type, result);
2862
2863 if (TREE_SIDE_EFFECTS (omitted2))
2864 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2865 if (TREE_SIDE_EFFECTS (omitted1))
2866 t = build2 (COMPOUND_EXPR, type, omitted1, t);
2867
2868 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
2869 }
2870
2871 \f
2872 /* Return a simplified tree node for the truth-negation of ARG. This
2873 never alters ARG itself. We assume that ARG is an operation that
2874 returns a truth value (0 or 1).
2875
2876 FIXME: one would think we would fold the result, but it causes
2877 problems with the dominator optimizer. */
2878 tree
2879 invert_truthvalue (tree arg)
2880 {
2881 tree type = TREE_TYPE (arg);
2882 enum tree_code code = TREE_CODE (arg);
2883
2884 if (code == ERROR_MARK)
2885 return arg;
2886
2887 /* If this is a comparison, we can simply invert it, except for
2888 floating-point non-equality comparisons, in which case we just
2889 enclose a TRUTH_NOT_EXPR around what we have. */
2890
2891 if (TREE_CODE_CLASS (code) == '<')
2892 {
2893 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
2894 if (FLOAT_TYPE_P (op_type)
2895 && flag_trapping_math
2896 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2897 && code != NE_EXPR && code != EQ_EXPR)
2898 return build1 (TRUTH_NOT_EXPR, type, arg);
2899 else
2900 {
2901 code = invert_tree_comparison (code,
2902 HONOR_NANS (TYPE_MODE (op_type)));
2903 if (code == ERROR_MARK)
2904 return build1 (TRUTH_NOT_EXPR, type, arg);
2905 else
2906 return build2 (code, type,
2907 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2908 }
2909 }
2910
2911 switch (code)
2912 {
2913 case INTEGER_CST:
2914 return fold_convert (type, build_int_2 (integer_zerop (arg), 0));
2915
2916 case TRUTH_AND_EXPR:
2917 return build2 (TRUTH_OR_EXPR, type,
2918 invert_truthvalue (TREE_OPERAND (arg, 0)),
2919 invert_truthvalue (TREE_OPERAND (arg, 1)));
2920
2921 case TRUTH_OR_EXPR:
2922 return build2 (TRUTH_AND_EXPR, type,
2923 invert_truthvalue (TREE_OPERAND (arg, 0)),
2924 invert_truthvalue (TREE_OPERAND (arg, 1)));
2925
2926 case TRUTH_XOR_EXPR:
2927 /* Here we can invert either operand. We invert the first operand
2928 unless the second operand is a TRUTH_NOT_EXPR in which case our
2929 result is the XOR of the first operand with the inside of the
2930 negation of the second operand. */
2931
2932 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2933 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2934 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2935 else
2936 return build2 (TRUTH_XOR_EXPR, type,
2937 invert_truthvalue (TREE_OPERAND (arg, 0)),
2938 TREE_OPERAND (arg, 1));
2939
2940 case TRUTH_ANDIF_EXPR:
2941 return build2 (TRUTH_ORIF_EXPR, type,
2942 invert_truthvalue (TREE_OPERAND (arg, 0)),
2943 invert_truthvalue (TREE_OPERAND (arg, 1)));
2944
2945 case TRUTH_ORIF_EXPR:
2946 return build2 (TRUTH_ANDIF_EXPR, type,
2947 invert_truthvalue (TREE_OPERAND (arg, 0)),
2948 invert_truthvalue (TREE_OPERAND (arg, 1)));
2949
2950 case TRUTH_NOT_EXPR:
2951 return TREE_OPERAND (arg, 0);
2952
2953 case COND_EXPR:
2954 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
2955 invert_truthvalue (TREE_OPERAND (arg, 1)),
2956 invert_truthvalue (TREE_OPERAND (arg, 2)));
2957
2958 case COMPOUND_EXPR:
2959 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2960 invert_truthvalue (TREE_OPERAND (arg, 1)));
2961
2962 case NON_LVALUE_EXPR:
2963 return invert_truthvalue (TREE_OPERAND (arg, 0));
2964
2965 case NOP_EXPR:
2966 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
2967 break;
2968
2969 case CONVERT_EXPR:
2970 case FLOAT_EXPR:
2971 return build1 (TREE_CODE (arg), type,
2972 invert_truthvalue (TREE_OPERAND (arg, 0)));
2973
2974 case BIT_AND_EXPR:
2975 if (!integer_onep (TREE_OPERAND (arg, 1)))
2976 break;
2977 return build2 (EQ_EXPR, type, arg,
2978 fold_convert (type, integer_zero_node));
2979
2980 case SAVE_EXPR:
2981 return build1 (TRUTH_NOT_EXPR, type, arg);
2982
2983 case CLEANUP_POINT_EXPR:
2984 return build1 (CLEANUP_POINT_EXPR, type,
2985 invert_truthvalue (TREE_OPERAND (arg, 0)));
2986
2987 default:
2988 break;
2989 }
2990 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2991 abort ();
2992 return build1 (TRUTH_NOT_EXPR, type, arg);
2993 }
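
/* Examples of the transformations above (editorial sketch):

     !(a && b)     becomes  !a || !b       (TRUTH_AND_EXPR case)
     !(a ? b : c)  becomes  a ? !b : !c    (COND_EXPR case)
     !(x < y)      becomes  x >= y         for integers; for floats it
       becomes UNGE_EXPR when NaNs must be honored, and stays wrapped
       in TRUTH_NOT_EXPR when trapping math forbids the inversion.  */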
2994
2995 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2996 operands are another bit-wise operation with a common input. If so,
2997 distribute the bit operations to save an operation and possibly two if
2998 constants are involved. For example, convert
2999 (A | B) & (A | C) into A | (B & C)
3000 Further simplification will occur if B and C are constants.
3001
3002 If this optimization cannot be done, 0 will be returned. */
3003
3004 static tree
3005 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3006 {
3007 tree common;
3008 tree left, right;
3009
3010 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3011 || TREE_CODE (arg0) == code
3012 || (TREE_CODE (arg0) != BIT_AND_EXPR
3013 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3014 return 0;
3015
3016 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3017 {
3018 common = TREE_OPERAND (arg0, 0);
3019 left = TREE_OPERAND (arg0, 1);
3020 right = TREE_OPERAND (arg1, 1);
3021 }
3022 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3023 {
3024 common = TREE_OPERAND (arg0, 0);
3025 left = TREE_OPERAND (arg0, 1);
3026 right = TREE_OPERAND (arg1, 0);
3027 }
3028 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3029 {
3030 common = TREE_OPERAND (arg0, 1);
3031 left = TREE_OPERAND (arg0, 0);
3032 right = TREE_OPERAND (arg1, 1);
3033 }
3034 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3035 {
3036 common = TREE_OPERAND (arg0, 1);
3037 left = TREE_OPERAND (arg0, 0);
3038 right = TREE_OPERAND (arg1, 0);
3039 }
3040 else
3041 return 0;
3042
3043 return fold (build2 (TREE_CODE (arg0), type, common,
3044 fold (build2 (code, type, left, right))));
3045 }
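
/* For instance (editorial sketch), (x & 12) | (x & 3) is rewritten as
   x & (12 | 3); the inner fold then merges the constants, so the
   result is x & 15 and one bitwise operation is saved.  */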
3046 \f
3047 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3048 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3049
3050 static tree
3051 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3052 int unsignedp)
3053 {
3054 tree result = build3 (BIT_FIELD_REF, type, inner,
3055 size_int (bitsize), bitsize_int (bitpos));
3056
3057 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3058
3059 return result;
3060 }
3061
3062 /* Optimize a bit-field compare.
3063
3064 There are two cases: First is a compare against a constant and the
3065 second is a comparison of two items where the fields are at the same
3066 bit position relative to the start of a chunk (byte, halfword, word)
3067 large enough to contain it. In these cases we can avoid the shift
3068 implicit in bitfield extractions.
3069
3070 For constants, we emit a compare of the shifted constant with the
3071 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3072 compared. For two fields at the same position, we do the ANDs with the
3073 similar mask and compare the result of the ANDs.
3074
3075 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3076 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3077 are the left and right operands of the comparison, respectively.
3078
3079 If the optimization described above can be done, we return the resulting
3080 tree. Otherwise we return zero. */
3081
3082 static tree
3083 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3084 tree lhs, tree rhs)
3085 {
3086 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3087 tree type = TREE_TYPE (lhs);
3088 tree signed_type, unsigned_type;
3089 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3090 enum machine_mode lmode, rmode, nmode;
3091 int lunsignedp, runsignedp;
3092 int lvolatilep = 0, rvolatilep = 0;
3093 tree linner, rinner = NULL_TREE;
3094 tree mask;
3095 tree offset;
3096
3097 /* Get all the information about the extractions being done. If the bit size
3098 is the same as the size of the underlying object, we aren't doing an
3099 extraction at all and so can do nothing. We also don't want to
3100 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3101 then will no longer be able to replace it. */
3102 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3103 &lunsignedp, &lvolatilep);
3104 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3105 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3106 return 0;
3107
3108 if (!const_p)
3109 {
3110 /* If this is not a constant, we can only do something if bit positions,
3111 sizes, and signedness are the same. */
3112 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3113 &runsignedp, &rvolatilep);
3114
3115 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3116 || lunsignedp != runsignedp || offset != 0
3117 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3118 return 0;
3119 }
3120
3121 /* See if we can find a mode to refer to this field. We should be able to,
3122 but fail if we can't. */
3123 nmode = get_best_mode (lbitsize, lbitpos,
3124 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3125 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3126 TYPE_ALIGN (TREE_TYPE (rinner))),
3127 word_mode, lvolatilep || rvolatilep);
3128 if (nmode == VOIDmode)
3129 return 0;
3130
3131 /* Set signed and unsigned types of the precision of this mode for the
3132 shifts below. */
3133 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3134 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3135
3136 /* Compute the bit position and size for the new reference and our offset
3137 within it. If the new reference is the same size as the original, we
3138 won't optimize anything, so return zero. */
3139 nbitsize = GET_MODE_BITSIZE (nmode);
3140 nbitpos = lbitpos & ~ (nbitsize - 1);
3141 lbitpos -= nbitpos;
3142 if (nbitsize == lbitsize)
3143 return 0;
3144
3145 if (BYTES_BIG_ENDIAN)
3146 lbitpos = nbitsize - lbitsize - lbitpos;
3147
3148 /* Make the mask to be used against the extracted field. */
3149 mask = build_int_2 (~0, ~0);
3150 TREE_TYPE (mask) = unsigned_type;
3151 force_fit_type (mask, 0);
3152 mask = fold_convert (unsigned_type, mask);
3153 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3154 mask = const_binop (RSHIFT_EXPR, mask,
3155 size_int (nbitsize - lbitsize - lbitpos), 0);
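/* Worked example (editorial sketch): with nbitsize == 32, lbitsize == 5
   and lbitpos == 3, the left shift by 27 gives 0xf8000000 and the
   logical right shift by 24 gives 0x000000f8: five one bits starting
   at bit 3, exactly covering the field.  */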
3156
3157 if (! const_p)
3158 /* If not comparing with constant, just rework the comparison
3159 and return. */
3160 return build2 (code, compare_type,
3161 build2 (BIT_AND_EXPR, unsigned_type,
3162 make_bit_field_ref (linner, unsigned_type,
3163 nbitsize, nbitpos, 1),
3164 mask),
3165 build2 (BIT_AND_EXPR, unsigned_type,
3166 make_bit_field_ref (rinner, unsigned_type,
3167 nbitsize, nbitpos, 1),
3168 mask));
3169
3170 /* Otherwise, we are handling the constant case. See if the constant is too
3171 big for the field.  Warn and return a tree for 0 (false) if so.  We do
3172 this not only for its own sake, but to avoid having to test for this
3173 error case below. If we didn't, we might generate wrong code.
3174
3175 For unsigned fields, the constant shifted right by the field length should
3176 be all zero. For signed fields, the high-order bits should agree with
3177 the sign bit. */
3178
3179 if (lunsignedp)
3180 {
3181 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3182 fold_convert (unsigned_type, rhs),
3183 size_int (lbitsize), 0)))
3184 {
3185 warning ("comparison is always %d due to width of bit-field",
3186 code == NE_EXPR);
3187 return constant_boolean_node (code == NE_EXPR, compare_type);
3188 }
3189 }
3190 else
3191 {
3192 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3193 size_int (lbitsize - 1), 0);
3194 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3195 {
3196 warning ("comparison is always %d due to width of bit-field",
3197 code == NE_EXPR);
3198 return constant_boolean_node (code == NE_EXPR, compare_type);
3199 }
3200 }
3201
3202 /* Single-bit compares should always be against zero. */
3203 if (lbitsize == 1 && ! integer_zerop (rhs))
3204 {
3205 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3206 rhs = fold_convert (type, integer_zero_node);
3207 }
3208
3209 /* Make a new bitfield reference, shift the constant over the
3210 appropriate number of bits and mask it with the computed mask
3211 (in case this was a signed field). If we changed it, make a new one. */
3212 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3213 if (lvolatilep)
3214 {
3215 TREE_SIDE_EFFECTS (lhs) = 1;
3216 TREE_THIS_VOLATILE (lhs) = 1;
3217 }
3218
3219 rhs = fold (const_binop (BIT_AND_EXPR,
3220 const_binop (LSHIFT_EXPR,
3221 fold_convert (unsigned_type, rhs),
3222 size_int (lbitpos), 0),
3223 mask, 0));
3224
3225 return build2 (code, compare_type,
3226 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3227 rhs);
3228 }
3229 \f
3230 /* Subroutine for fold_truthop: decode a field reference.
3231
3232 If EXP is a comparison reference, we return the innermost reference.
3233
3234 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3235 set to the starting bit number.
3236
3237 If the innermost field can be completely contained in a mode-sized
3238 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3239
3240 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3241 otherwise it is not changed.
3242
3243 *PUNSIGNEDP is set to the signedness of the field.
3244
3245 *PMASK is set to the mask used. This is either contained in a
3246 BIT_AND_EXPR or derived from the width of the field.
3247
3248 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3249
3250 Return 0 if this is not a component reference or is one that we can't
3251 do anything with. */
3252
3253 static tree
3254 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3255 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3256 int *punsignedp, int *pvolatilep,
3257 tree *pmask, tree *pand_mask)
3258 {
3259 tree outer_type = 0;
3260 tree and_mask = 0;
3261 tree mask, inner, offset;
3262 tree unsigned_type;
3263 unsigned int precision;
3264
3265 /* All the optimizations using this function assume integer fields.
3266 There are problems with FP fields since the type_for_size call
3267 below can fail for, e.g., XFmode. */
3268 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3269 return 0;
3270
3271 /* We are interested in the bare arrangement of bits, so strip everything
3272 that doesn't affect the machine mode. However, record the type of the
3273 outermost expression if it may matter below. */
3274 if (TREE_CODE (exp) == NOP_EXPR
3275 || TREE_CODE (exp) == CONVERT_EXPR
3276 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3277 outer_type = TREE_TYPE (exp);
3278 STRIP_NOPS (exp);
3279
3280 if (TREE_CODE (exp) == BIT_AND_EXPR)
3281 {
3282 and_mask = TREE_OPERAND (exp, 1);
3283 exp = TREE_OPERAND (exp, 0);
3284 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3285 if (TREE_CODE (and_mask) != INTEGER_CST)
3286 return 0;
3287 }
3288
3289 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3290 punsignedp, pvolatilep);
3291 if ((inner == exp && and_mask == 0)
3292 || *pbitsize < 0 || offset != 0
3293 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3294 return 0;
3295
3296 /* If the number of bits in the reference is the same as the bitsize of
3297 the outer type, then the outer type gives the signedness. Otherwise
3298 (in case of a small bitfield) the signedness is unchanged. */
3299 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3300 *punsignedp = TYPE_UNSIGNED (outer_type);
3301
3302 /* Compute the mask to access the bitfield. */
3303 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3304 precision = TYPE_PRECISION (unsigned_type);
3305
3306 mask = build_int_2 (~0, ~0);
3307 TREE_TYPE (mask) = unsigned_type;
3308 force_fit_type (mask, 0);
3309 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3310 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3311
3312 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3313 if (and_mask != 0)
3314 mask = fold (build2 (BIT_AND_EXPR, unsigned_type,
3315 fold_convert (unsigned_type, and_mask), mask));
3316
3317 *pmask = mask;
3318 *pand_mask = and_mask;
3319 return inner;
3320 }
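
/* For example (editorial sketch): for EXP == "(s.b & 0x3f)" where s.b
   is an 8-bit unsigned field, we return the underlying object
   containing the field, set *PBITSIZE to 8, *PAND_MASK to 0x3f, and
   *PMASK to the field mask 0xff merged with 0x3f, i.e. 0x3f.  */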
3321
3322 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3323 bit positions. */
3324
3325 static int
3326 all_ones_mask_p (tree mask, int size)
3327 {
3328 tree type = TREE_TYPE (mask);
3329 unsigned int precision = TYPE_PRECISION (type);
3330 tree tmask;
3331
3332 tmask = build_int_2 (~0, ~0);
3333 TREE_TYPE (tmask) = lang_hooks.types.signed_type (type);
3334 force_fit_type (tmask, 0);
3335 return
3336 tree_int_cst_equal (mask,
3337 const_binop (RSHIFT_EXPR,
3338 const_binop (LSHIFT_EXPR, tmask,
3339 size_int (precision - size),
3340 0),
3341 size_int (precision - size), 0));
3342 }
3343
3344 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3345 represents the sign bit of EXP's type. If EXP represents a sign
3346 or zero extension, also test VAL against the unextended type.
3347 The return value is the (sub)expression whose sign bit is VAL,
3348 or NULL_TREE otherwise. */
3349
3350 static tree
3351 sign_bit_p (tree exp, tree val)
3352 {
3353 unsigned HOST_WIDE_INT mask_lo, lo;
3354 HOST_WIDE_INT mask_hi, hi;
3355 int width;
3356 tree t;
3357
3358 /* Tree EXP must have an integral type. */
3359 t = TREE_TYPE (exp);
3360 if (! INTEGRAL_TYPE_P (t))
3361 return NULL_TREE;
3362
3363 /* Tree VAL must be an integer constant. */
3364 if (TREE_CODE (val) != INTEGER_CST
3365 || TREE_CONSTANT_OVERFLOW (val))
3366 return NULL_TREE;
3367
3368 width = TYPE_PRECISION (t);
3369 if (width > HOST_BITS_PER_WIDE_INT)
3370 {
3371 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3372 lo = 0;
3373
3374 mask_hi = ((unsigned HOST_WIDE_INT) -1
3375 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3376 mask_lo = -1;
3377 }
3378 else
3379 {
3380 hi = 0;
3381 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3382
3383 mask_hi = 0;
3384 mask_lo = ((unsigned HOST_WIDE_INT) -1
3385 >> (HOST_BITS_PER_WIDE_INT - width));
3386 }
3387
3388 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3389 treat VAL as if it were unsigned. */
3390 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3391 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3392 return exp;
3393
3394 /* Handle extension from a narrower type. */
3395 if (TREE_CODE (exp) == NOP_EXPR
3396 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3397 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3398
3399 return NULL_TREE;
3400 }
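
/* For example (editorial sketch): for a 32-bit EXP and VAL ==
   0x80000000 we take the width <= HOST_BITS_PER_WIDE_INT branch above,
   where lo == 1 << 31 and mask_lo covers the low 32 bits, so the test
   succeeds and EXP itself is returned.  */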
3401
3402 /* Subroutine for fold_truthop: determine if an operand is simple enough
3403 to be evaluated unconditionally. */
3404
3405 static int
3406 simple_operand_p (tree exp)
3407 {
3408 /* Strip any conversions that don't change the machine mode. */
3409 while ((TREE_CODE (exp) == NOP_EXPR
3410 || TREE_CODE (exp) == CONVERT_EXPR)
3411 && (TYPE_MODE (TREE_TYPE (exp))
3412 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
3413 exp = TREE_OPERAND (exp, 0);
3414
3415 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
3416 || (DECL_P (exp)
3417 && ! TREE_ADDRESSABLE (exp)
3418 && ! TREE_THIS_VOLATILE (exp)
3419 && ! DECL_NONLOCAL (exp)
3420 /* Don't regard global variables as simple. They may be
3421 allocated in ways unknown to the compiler (shared memory,
3422 #pragma weak, etc). */
3423 && ! TREE_PUBLIC (exp)
3424 && ! DECL_EXTERNAL (exp)
3425 /* Loading a static variable is unduly expensive, but global
3426 registers aren't expensive. */
3427 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3428 }
3429 \f
3430 /* The following functions are subroutines to fold_range_test and allow it to
3431 try to change a logical combination of comparisons into a range test.
3432
3433 For example, both
3434 X == 2 || X == 3 || X == 4 || X == 5
3435 and
3436 X >= 2 && X <= 5
3437 are converted to
3438 (unsigned) (X - 2) <= 3
3439
3440 We describe each set of comparisons as being either inside or outside
3441 a range, using a variable named like IN_P, and then describe the
3442 range with a lower and upper bound. If one of the bounds is omitted,
3443 it represents either the highest or lowest value of the type.
3444
3445 In the comments below, we represent a range by two numbers in brackets
3446 preceded by a "+" to designate being inside that range, or a "-" to
3447 designate being outside that range, so the condition can be inverted by
3448 flipping the prefix. An omitted bound is represented by a "-". For
3449 example, "- [-, 10]" means being outside the range starting at the lowest
3450 possible value and ending at 10, in other words, being greater than 10.
3451 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3452 always false.
3453
3454 We set up things so that the missing bounds are handled in a consistent
3455 manner so neither a missing bound nor "true" and "false" need to be
3456 handled using a special case. */
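
/* Checking the example above (editorial sketch): X in [2, 5] becomes
   X - 2 in [0, 3]; viewed as unsigned, any X below 2 wraps around to a
   value far above 3, so the single comparison (unsigned) (X - 2) <= 3
   enforces both bounds at once.  */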
3457
3458 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3459 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3460 and UPPER1_P are nonzero if the respective argument is an upper bound
3461 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3462 must be specified for a comparison. ARG1 will be converted to ARG0's
3463 type if both are specified. */
3464
3465 static tree
3466 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3467 tree arg1, int upper1_p)
3468 {
3469 tree tem;
3470 int result;
3471 int sgn0, sgn1;
3472
3473 /* If neither arg represents infinity, do the normal operation.
3474 Else, if not a comparison, return infinity. Else handle the special
3475 comparison rules. Note that most of the cases below won't occur, but
3476 are handled for consistency. */
3477
3478 if (arg0 != 0 && arg1 != 0)
3479 {
3480 tem = fold (build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3481 arg0, fold_convert (TREE_TYPE (arg0), arg1)));
3482 STRIP_NOPS (tem);
3483 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3484 }
3485
3486 if (TREE_CODE_CLASS (code) != '<')
3487 return 0;
3488
3489 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3490 for neither. In real maths, we cannot assume open ended ranges are
3491 the same. But, this is computer arithmetic, where numbers are finite.
3492 We can therefore model any unbounded bound as a value Z greater than
3493 any representable number, which permits us to treat unbounded
3494 ranges as equal.  */
3495 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3496 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3497 switch (code)
3498 {
3499 case EQ_EXPR:
3500 result = sgn0 == sgn1;
3501 break;
3502 case NE_EXPR:
3503 result = sgn0 != sgn1;
3504 break;
3505 case LT_EXPR:
3506 result = sgn0 < sgn1;
3507 break;
3508 case LE_EXPR:
3509 result = sgn0 <= sgn1;
3510 break;
3511 case GT_EXPR:
3512 result = sgn0 > sgn1;
3513 break;
3514 case GE_EXPR:
3515 result = sgn0 >= sgn1;
3516 break;
3517 default:
3518 abort ();
3519 }
3520
3521 return constant_boolean_node (result, type);
3522 }
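
/* For example (editorial sketch): comparing a missing lower bound
   (SGN == -1) against a present argument (SGN == 0) with LT_EXPR
   yields -1 < 0, i.e. true: the lowest representable value is below
   any finite bound.  */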
3523 \f
3524 /* Given EXP, a logical expression, set the range it is testing into
3525 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3526 actually being tested. *PLOW and *PHIGH will be made of the same type
3527 as the returned expression. If EXP is not a comparison, we will most
3528 likely not be returning a useful value and range. */
3529
3530 static tree
3531 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3532 {
3533 enum tree_code code;
3534 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3535 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3536 int in_p, n_in_p;
3537 tree low, high, n_low, n_high;
3538
3539 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3540 and see if we can refine the range. Some of the cases below may not
3541 happen, but it doesn't seem worth worrying about this. We "continue"
3542 the outer loop when we've changed something; otherwise we "break"
3543 the switch, which will "break" the while. */
3544
3545 in_p = 0;
3546 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3547
3548 while (1)
3549 {
3550 code = TREE_CODE (exp);
3551 exp_type = TREE_TYPE (exp);
3552
3553 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3554 {
3555 if (first_rtl_op (code) > 0)
3556 arg0 = TREE_OPERAND (exp, 0);
3557 if (TREE_CODE_CLASS (code) == '<'
3558 || TREE_CODE_CLASS (code) == '1'
3559 || TREE_CODE_CLASS (code) == '2')
3560 arg0_type = TREE_TYPE (arg0);
3561 if (TREE_CODE_CLASS (code) == '2'
3562 || TREE_CODE_CLASS (code) == '<'
3563 || (TREE_CODE_CLASS (code) == 'e'
3564 && TREE_CODE_LENGTH (code) > 1))
3565 arg1 = TREE_OPERAND (exp, 1);
3566 }
3567
3568 switch (code)
3569 {
3570 case TRUTH_NOT_EXPR:
3571 in_p = ! in_p, exp = arg0;
3572 continue;
3573
3574 case EQ_EXPR: case NE_EXPR:
3575 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3576 /* We can only do something if the range is testing for zero
3577 and if the second operand is an integer constant. Note that
3578 saying something is "in" the range we make is done by
3579 complementing IN_P, since IN_P is set for the initial case of
3580 being not equal to zero; "out" is leaving it alone. */
3581 if (low == 0 || high == 0
3582 || ! integer_zerop (low) || ! integer_zerop (high)
3583 || TREE_CODE (arg1) != INTEGER_CST)
3584 break;
3585
3586 switch (code)
3587 {
3588 case NE_EXPR: /* - [c, c] */
3589 low = high = arg1;
3590 break;
3591 case EQ_EXPR: /* + [c, c] */
3592 in_p = ! in_p, low = high = arg1;
3593 break;
3594 case GT_EXPR: /* - [-, c] */
3595 low = 0, high = arg1;
3596 break;
3597 case GE_EXPR: /* + [c, -] */
3598 in_p = ! in_p, low = arg1, high = 0;
3599 break;
3600 case LT_EXPR: /* - [c, -] */
3601 low = arg1, high = 0;
3602 break;
3603 case LE_EXPR: /* + [-, c] */
3604 in_p = ! in_p, low = 0, high = arg1;
3605 break;
3606 default:
3607 abort ();
3608 }
3609
3610 /* If this is an unsigned comparison, we also know that EXP is
3611 greater than or equal to zero. We base the range tests we make
3612 on that fact, and record it here in order to parse existing
3613 range tests. We test arg0_type since the return type of,
3614 e.g., EQ_EXPR, is often boolean. */
3615 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3616 {
3617 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
3618 1, fold_convert (arg0_type, integer_zero_node),
3619 NULL_TREE))
3620 break;
3621
3622 in_p = n_in_p, low = n_low, high = n_high;
3623
3624 /* If the high bound is missing, but we have a nonzero low
3625 bound, reverse the range so it goes from zero to the low bound
3626 minus 1. */
3627 if (high == 0 && low && ! integer_zerop (low))
3628 {
3629 in_p = ! in_p;
3630 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3631 integer_one_node, 0);
3632 low = fold_convert (arg0_type, integer_zero_node);
3633 }
3634 }
3635
3636 exp = arg0;
3637 continue;
3638
3639 case NEGATE_EXPR:
3640 /* (-x) IN [a,b] -> x in [-b, -a] */
3641 n_low = range_binop (MINUS_EXPR, exp_type,
3642 fold_convert (exp_type, integer_zero_node),
3643 0, high, 1);
3644 n_high = range_binop (MINUS_EXPR, exp_type,
3645 fold_convert (exp_type, integer_zero_node),
3646 0, low, 0);
3647 low = n_low, high = n_high;
3648 exp = arg0;
3649 continue;
3650
3651 case BIT_NOT_EXPR:
3652 /* ~ X -> -X - 1 */
3653 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3654 fold_convert (exp_type, integer_one_node));
3655 continue;
3656
3657 case PLUS_EXPR: case MINUS_EXPR:
3658 if (TREE_CODE (arg1) != INTEGER_CST)
3659 break;
3660
3661 /* If EXP is signed, any overflow in the computation is undefined,
3662 so we don't worry about it so long as our computations on
3663 the bounds don't overflow. For unsigned, overflow is defined
3664 and this is exactly the right thing. */
3665 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3666 arg0_type, low, 0, arg1, 0);
3667 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3668 arg0_type, high, 1, arg1, 0);
3669 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3670 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3671 break;
3672
3673 /* Check for an unsigned range which has wrapped around the maximum
3674 value thus making n_high < n_low, and normalize it. */
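/* For instance, with unsigned char x, the range + [0, 10] for x + 5
   maps back to n_low = 0 - 5 == 251 and n_high = 10 - 5 == 5 for x
   itself; since n_high < n_low we record - [6, 250] instead, i.e.
   x in [0, 5] or in [251, 255]. */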
3675 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3676 {
3677 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3678 integer_one_node, 0);
3679 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3680 integer_one_node, 0);
3681
3682 /* If the range is of the form +/- [ x+1, x ], we won't
3683 be able to normalize it. But then, it represents the
3684 whole range or the empty set, so make it
3685 +/- [ -, - ]. */
3686 if (tree_int_cst_equal (n_low, low)
3687 && tree_int_cst_equal (n_high, high))
3688 low = high = 0;
3689 else
3690 in_p = ! in_p;
3691 }
3692 else
3693 low = n_low, high = n_high;
3694
3695 exp = arg0;
3696 continue;
3697
3698 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3699 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3700 break;
3701
3702 if (! INTEGRAL_TYPE_P (arg0_type)
3703 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3704 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3705 break;
3706
3707 n_low = low, n_high = high;
3708
3709 if (n_low != 0)
3710 n_low = fold_convert (arg0_type, n_low);
3711
3712 if (n_high != 0)
3713 n_high = fold_convert (arg0_type, n_high);
3714
3715
3716 /* If we're converting arg0, which has an unsigned type, to the
3717 signed type of exp, we will be doing the comparison as unsigned.
3718 The tests above have already verified that LOW and HIGH
3719 are both positive.
3720
3721 So we have to ensure that we will handle large unsigned
3722 values the same way that the current signed bounds treat
3723 negative values. */
3724
3725 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3726 {
3727 tree high_positive;
3728 tree equiv_type = lang_hooks.types.type_for_mode
3729 (TYPE_MODE (arg0_type), 1);
3730
3731 /* A range without an upper bound is, naturally, unbounded.
3732 Since convert would have cropped a very large value, use
3733 the max value for the destination type. */
3734 high_positive
3735 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3736 : TYPE_MAX_VALUE (arg0_type);
3737
3738 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3739 high_positive = fold (build2 (RSHIFT_EXPR, arg0_type,
3740 fold_convert (arg0_type,
3741 high_positive),
3742 fold_convert (arg0_type,
3743 integer_one_node)));
3744
3745 /* If the low bound is specified, "and" the range with the
3746 range for which the original unsigned value will be
3747 positive. */
3748 if (low != 0)
3749 {
3750 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3751 1, n_low, n_high, 1,
3752 fold_convert (arg0_type, integer_zero_node),
3753 high_positive))
3754 break;
3755
3756 in_p = (n_in_p == in_p);
3757 }
3758 else
3759 {
3760 /* Otherwise, "or" the range with the range of the input
3761 that will be interpreted as negative. */
3762 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3763 0, n_low, n_high, 1,
3764 fold_convert (arg0_type, integer_zero_node),
3765 high_positive))
3766 break;
3767
3768 in_p = (in_p != n_in_p);
3769 }
3770 }
3771
3772 exp = arg0;
3773 low = n_low, high = n_high;
3774 continue;
3775
3776 default:
3777 break;
3778 }
3779
3780 break;
3781 }
3782
3783 /* If EXP is a constant, we can evaluate whether this is true or false. */
3784 if (TREE_CODE (exp) == INTEGER_CST)
3785 {
3786 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3787 exp, 0, low, 0))
3788 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3789 exp, 1, high, 1)));
3790 low = high = 0;
3791 exp = 0;
3792 }
3793
3794 *pin_p = in_p, *plow = low, *phigh = high;
3795 return exp;
3796 }
3797 \f
3798 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3799 type, TYPE, return an expression to test if EXP is in (or out of, depending
3800 on IN_P) the range. Return 0 if the test couldn't be created. */
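/* For example, a check for X in + [2, 5], with X a signed int, falls
   through to the MINUS_EXPR case at the end of this function and is
   built as the single unsigned test (unsigned) (X - 2) <= 3. */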
3801
3802 static tree
3803 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3804 {
3805 tree etype = TREE_TYPE (exp);
3806 tree value;
3807
3808 if (! in_p)
3809 {
3810 value = build_range_check (type, exp, 1, low, high);
3811 if (value != 0)
3812 return invert_truthvalue (value);
3813
3814 return 0;
3815 }
3816
3817 if (low == 0 && high == 0)
3818 return fold_convert (type, integer_one_node);
3819
3820 if (low == 0)
3821 return fold (build2 (LE_EXPR, type, exp, high));
3822
3823 if (high == 0)
3824 return fold (build2 (GE_EXPR, type, exp, low));
3825
3826 if (operand_equal_p (low, high, 0))
3827 return fold (build2 (EQ_EXPR, type, exp, low));
3828
3829 if (integer_zerop (low))
3830 {
3831 if (! TYPE_UNSIGNED (etype))
3832 {
3833 etype = lang_hooks.types.unsigned_type (etype);
3834 high = fold_convert (etype, high);
3835 exp = fold_convert (etype, exp);
3836 }
3837 return build_range_check (type, exp, 1, 0, high);
3838 }
3839
3840 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3841 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3842 {
3843 unsigned HOST_WIDE_INT lo;
3844 HOST_WIDE_INT hi;
3845 int prec;
3846
3847 prec = TYPE_PRECISION (etype);
3848 if (prec <= HOST_BITS_PER_WIDE_INT)
3849 {
3850 hi = 0;
3851 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3852 }
3853 else
3854 {
3855 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3856 lo = (unsigned HOST_WIDE_INT) -1;
3857 }
3858
3859 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3860 {
3861 if (TYPE_UNSIGNED (etype))
3862 {
3863 etype = lang_hooks.types.signed_type (etype);
3864 exp = fold_convert (etype, exp);
3865 }
3866 return fold (build2 (GT_EXPR, type, exp,
3867 fold_convert (etype, integer_zero_node)));
3868 }
3869 }
3870
3871 value = const_binop (MINUS_EXPR, high, low, 0);
3872 if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
3873 {
3874 tree utype, minv, maxv;
3875
3876 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
3877 for the type in question, as we rely on this here. */
3878 switch (TREE_CODE (etype))
3879 {
3880 case INTEGER_TYPE:
3881 case ENUMERAL_TYPE:
3882 case CHAR_TYPE:
3883 utype = lang_hooks.types.unsigned_type (etype);
3884 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
3885 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
3886 integer_one_node, 1);
3887 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
3888 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
3889 minv, 1, maxv, 1)))
3890 {
3891 etype = utype;
3892 high = fold_convert (etype, high);
3893 low = fold_convert (etype, low);
3894 exp = fold_convert (etype, exp);
3895 value = const_binop (MINUS_EXPR, high, low, 0);
3896 }
3897 break;
3898 default:
3899 break;
3900 }
3901 }
3902
3903 if (value != 0 && ! TREE_OVERFLOW (value))
3904 return build_range_check (type,
3905 fold (build2 (MINUS_EXPR, etype, exp, low)),
3906 1, fold_convert (etype, integer_zero_node),
3907 value);
3908
3909 return 0;
3910 }
3911 \f
3912 /* Given two ranges, see if we can merge them into one. Return 1 if we
3913 can, 0 if we can't. Set the output range into the specified parameters. */
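/* Two constructed examples of the cases below: anding + [0, 9] with
   + [5, 19] gives the intersection + [5, 9], while anding the
   disjoint ranges + [0, 4] and + [10, 19] gives in_p == 0 with both
   bounds missing, which build_range_check turns into a constant
   false. */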
3914
3915 static int
3916 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3917 tree high0, int in1_p, tree low1, tree high1)
3918 {
3919 int no_overlap;
3920 int subset;
3921 int temp;
3922 tree tem;
3923 int in_p;
3924 tree low, high;
3925 int lowequal = ((low0 == 0 && low1 == 0)
3926 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3927 low0, 0, low1, 0)));
3928 int highequal = ((high0 == 0 && high1 == 0)
3929 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3930 high0, 1, high1, 1)));
3931
3932 /* Make range 0 be the range that starts first, or ends last if they
3933 start at the same value. Swap them if it isn't. */
3934 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3935 low0, 0, low1, 0))
3936 || (lowequal
3937 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3938 high1, 1, high0, 1))))
3939 {
3940 temp = in0_p, in0_p = in1_p, in1_p = temp;
3941 tem = low0, low0 = low1, low1 = tem;
3942 tem = high0, high0 = high1, high1 = tem;
3943 }
3944
3945 /* Now flag two cases, whether the ranges are disjoint or whether the
3946 second range is totally subsumed in the first. Note that the tests
3947 below are simplified by the ones above. */
3948 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3949 high0, 1, low1, 0));
3950 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3951 high1, 1, high0, 1));
3952
3953 /* We now have four cases, depending on whether we are including or
3954 excluding the two ranges. */
3955 if (in0_p && in1_p)
3956 {
3957 /* If they don't overlap, the result is false. If the second range
3958 is a subset it is the result. Otherwise, the range is from the start
3959 of the second to the end of the first. */
3960 if (no_overlap)
3961 in_p = 0, low = high = 0;
3962 else if (subset)
3963 in_p = 1, low = low1, high = high1;
3964 else
3965 in_p = 1, low = low1, high = high0;
3966 }
3967
3968 else if (in0_p && ! in1_p)
3969 {
3970 /* If they don't overlap, the result is the first range. If they are
3971 equal, the result is false. If the second range is a subset of the
3972 first, and the ranges begin at the same place, we go from just after
3973 the end of the first range to the end of the second. If the second
3974 range is not a subset of the first, or if it is a subset and both
3975 ranges end at the same place, the range starts at the start of the
3976 first range and ends just before the second range.
3977 Otherwise, we can't describe this as a single range. */
3978 if (no_overlap)
3979 in_p = 1, low = low0, high = high0;
3980 else if (lowequal && highequal)
3981 in_p = 0, low = high = 0;
3982 else if (subset && lowequal)
3983 {
3984 in_p = 1, high = high0;
3985 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3986 integer_one_node, 0);
3987 }
3988 else if (! subset || highequal)
3989 {
3990 in_p = 1, low = low0;
3991 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3992 integer_one_node, 0);
3993 }
3994 else
3995 return 0;
3996 }
3997
3998 else if (! in0_p && in1_p)
3999 {
4000 /* If they don't overlap, the result is the second range. If the second
4001 is a subset of the first, the result is false. Otherwise,
4002 the range starts just after the first range and ends at the
4003 end of the second. */
4004 if (no_overlap)
4005 in_p = 1, low = low1, high = high1;
4006 else if (subset || highequal)
4007 in_p = 0, low = high = 0;
4008 else
4009 {
4010 in_p = 1, high = high1;
4011 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4012 integer_one_node, 0);
4013 }
4014 }
4015
4016 else
4017 {
4018 /* The case where we are excluding both ranges. Here the complex case
4019 is if they don't overlap. In that case, the only time we have a
4020 range is if they are adjacent. If the second is a subset of the
4021 first, the result is the first. Otherwise, the range to exclude
4022 starts at the beginning of the first range and ends at the end of the
4023 second. */
4024 if (no_overlap)
4025 {
4026 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4027 range_binop (PLUS_EXPR, NULL_TREE,
4028 high0, 1,
4029 integer_one_node, 1),
4030 1, low1, 0)))
4031 in_p = 0, low = low0, high = high1;
4032 else
4033 {
4034 /* Canonicalize - [min, x] into - [-, x]. */
4035 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4036 switch (TREE_CODE (TREE_TYPE (low0)))
4037 {
4038 case ENUMERAL_TYPE:
4039 if (TYPE_PRECISION (TREE_TYPE (low0))
4040 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4041 break;
4042 /* FALLTHROUGH */
4043 case INTEGER_TYPE:
4044 case CHAR_TYPE:
4045 if (tree_int_cst_equal (low0,
4046 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4047 low0 = 0;
4048 break;
4049 case POINTER_TYPE:
4050 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4051 && integer_zerop (low0))
4052 low0 = 0;
4053 break;
4054 default:
4055 break;
4056 }
4057
4058 /* Canonicalize - [x, max] into - [x, -]. */
4059 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4060 switch (TREE_CODE (TREE_TYPE (high1)))
4061 {
4062 case ENUMERAL_TYPE:
4063 if (TYPE_PRECISION (TREE_TYPE (high1))
4064 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4065 break;
4066 /* FALLTHROUGH */
4067 case INTEGER_TYPE:
4068 case CHAR_TYPE:
4069 if (tree_int_cst_equal (high1,
4070 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4071 high1 = 0;
4072 break;
4073 case POINTER_TYPE:
4074 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4075 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4076 high1, 1,
4077 integer_one_node, 1)))
4078 high1 = 0;
4079 break;
4080 default:
4081 break;
4082 }
4083
4084 /* The ranges might be also adjacent between the maximum and
4085 minimum values of the given type. For
4086 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4087 return + [x + 1, y - 1]. */
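/* E.g., for signed X, the test X > 10 && X < 20 arrives here as the
   exclusion of both [-, 10] and [20, -]; the two ranges are not
   adjacent, so the code below returns + [11, 19]. */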
4088 if (low0 == 0 && high1 == 0)
4089 {
4090 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4091 integer_one_node, 1);
4092 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4093 integer_one_node, 0);
4094 if (low == 0 || high == 0)
4095 return 0;
4096
4097 in_p = 1;
4098 }
4099 else
4100 return 0;
4101 }
4102 }
4103 else if (subset)
4104 in_p = 0, low = low0, high = high0;
4105 else
4106 in_p = 0, low = low0, high = high1;
4107 }
4108
4109 *pin_p = in_p, *plow = low, *phigh = high;
4110 return 1;
4111 }
4112 \f
4113
4114 /* Subroutine of fold, looking inside expressions of the form
4115 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4116 of the COND_EXPR. This function is also used to optimize
4117 A op B ? C : A by reversing the comparison first.
4118
4119 Return a folded expression whose code is not a COND_EXPR
4120 anymore, or NULL_TREE if no folding opportunity is found. */
4121
4122 static tree
4123 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4124 {
4125 enum tree_code comp_code = TREE_CODE (arg0);
4126 tree arg00 = TREE_OPERAND (arg0, 0);
4127 tree arg01 = TREE_OPERAND (arg0, 1);
4128 tree arg1_type = TREE_TYPE (arg1);
4129 tree tem;
4130
4131 STRIP_NOPS (arg1);
4132 STRIP_NOPS (arg2);
4133
4134 /* If we have A op 0 ? A : -A, consider applying the following
4135 transformations:
4136
4137 A == 0? A : -A same as -A
4138 A != 0? A : -A same as A
4139 A >= 0? A : -A same as abs (A)
4140 A > 0? A : -A same as abs (A)
4141 A <= 0? A : -A same as -abs (A)
4142 A < 0? A : -A same as -abs (A)
4143
4144 None of these transformations work for modes with signed
4145 zeros. If A is +/-0, the first two transformations will
4146 change the sign of the result (from +0 to -0, or vice
4147 versa). The last four will fix the sign of the result,
4148 even though the original expressions could be positive or
4149 negative, depending on the sign of A.
4150
4151 Note that all these transformations are correct if A is
4152 NaN, since the two alternatives (A and -A) are also NaNs. */
4153 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4154 ? real_zerop (arg01)
4155 : integer_zerop (arg01))
4156 && TREE_CODE (arg2) == NEGATE_EXPR
4157 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4158 switch (comp_code)
4159 {
4160 case EQ_EXPR:
4161 tem = fold_convert (arg1_type, arg1);
4162 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4163 case NE_EXPR:
4164 return pedantic_non_lvalue (fold_convert (type, arg1));
4165 case GE_EXPR:
4166 case GT_EXPR:
4167 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4168 arg1 = fold_convert (lang_hooks.types.signed_type
4169 (TREE_TYPE (arg1)), arg1);
4170 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
4171 return pedantic_non_lvalue (fold_convert (type, tem));
4172 case LE_EXPR:
4173 case LT_EXPR:
4174 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4175 arg1 = fold_convert (lang_hooks.types.signed_type
4176 (TREE_TYPE (arg1)), arg1);
4177 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
4178 return negate_expr (fold_convert (type, tem));
4179 default:
4180 abort ();
4181 }
4182
4183 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4184 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4185 both transformations are correct when A is NaN: A != 0
4186 is then true, and A == 0 is false. */
4187
4188 if (integer_zerop (arg01) && integer_zerop (arg2))
4189 {
4190 if (comp_code == NE_EXPR)
4191 return pedantic_non_lvalue (fold_convert (type, arg1));
4192 else if (comp_code == EQ_EXPR)
4193 return pedantic_non_lvalue (fold_convert (type, integer_zero_node));
4194 }
4195
4196 /* Try some transformations of A op B ? A : B.
4197
4198 A == B? A : B same as B
4199 A != B? A : B same as A
4200 A >= B? A : B same as max (A, B)
4201 A > B? A : B same as max (B, A)
4202 A <= B? A : B same as min (A, B)
4203 A < B? A : B same as min (B, A)
4204
4205 As above, these transformations don't work in the presence
4206 of signed zeros. For example, if A and B are zeros of
4207 opposite sign, the first two transformations will change
4208 the sign of the result. In the last four, the original
4209 expressions give different results for (A=+0, B=-0) and
4210 (A=-0, B=+0), but the transformed expressions do not.
4211
4212 The first two transformations are correct if either A or B
4213 is a NaN. In the first transformation, the condition will
4214 be false, and B will indeed be chosen. In the case of the
4215 second transformation, the condition A != B will be true,
4216 and A will be chosen.
4217
4218 The conversions to max() and min() are not correct if B is
4219 a number and A is not. The conditions in the original
4220 expressions will be false, so all four give B. The min()
4221 and max() versions would give a NaN instead. */
4222 if (operand_equal_for_comparison_p (arg01, arg2, arg00))
4223 {
4224 tree comp_op0 = arg00;
4225 tree comp_op1 = arg01;
4226 tree comp_type = TREE_TYPE (comp_op0);
4227
4228 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4229 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4230 {
4231 comp_type = type;
4232 comp_op0 = arg1;
4233 comp_op1 = arg2;
4234 }
4235
4236 switch (comp_code)
4237 {
4238 case EQ_EXPR:
4239 return pedantic_non_lvalue (fold_convert (type, arg2));
4240 case NE_EXPR:
4241 return pedantic_non_lvalue (fold_convert (type, arg1));
4242 case LE_EXPR:
4243 case LT_EXPR:
4244 /* In C++ a ?: expression can be an lvalue, so put the
4245 operand which will be used if they are equal first
4246 so that we can convert this back to the
4247 corresponding COND_EXPR. */
4248 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4249 return pedantic_non_lvalue (
4250 fold_convert (type, fold (build2 (MIN_EXPR, comp_type,
4251 (comp_code == LE_EXPR
4252 ? comp_op0 : comp_op1),
4253 (comp_code == LE_EXPR
4254 ? comp_op1 : comp_op0)))));
4255 break;
4256 case GE_EXPR:
4257 case GT_EXPR:
4258 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4259 return pedantic_non_lvalue (
4260 fold_convert (type, fold (build2 (MAX_EXPR, comp_type,
4261 (comp_code == GE_EXPR
4262 ? comp_op0 : comp_op1),
4263 (comp_code == GE_EXPR
4264 ? comp_op1 : comp_op0)))));
4265 break;
4266 default:
4267 abort ();
4268 }
4269 }
4270
4271 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4272 we might still be able to simplify this. For example,
4273 if C1 is one less or one more than C2, this might have started
4274 out as a MIN or MAX and been transformed by this function.
4275 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
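/* Constructed instances: X < 5 ? X : 4 matches the LT_EXPR case with
   C1 == C2 + 1 and becomes MIN (X, 4); X > 5 ? X : 6 matches the
   GT_EXPR case with C1 == C2 - 1 and becomes MAX (X, 6). */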
4276
4277 if (INTEGRAL_TYPE_P (type)
4278 && TREE_CODE (arg01) == INTEGER_CST
4279 && TREE_CODE (arg2) == INTEGER_CST)
4280 switch (comp_code)
4281 {
4282 case EQ_EXPR:
4283 /* We can replace A with C1 in this case. */
4284 arg1 = fold_convert (type, arg01);
4285 return fold (build3 (COND_EXPR, type, arg0, arg1, arg2));
4286
4287 case LT_EXPR:
4288 /* If C1 is C2 + 1, this is min(A, C2). */
4289 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4290 OEP_ONLY_CONST)
4291 && operand_equal_p (arg01,
4292 const_binop (PLUS_EXPR, arg2,
4293 integer_one_node, 0),
4294 OEP_ONLY_CONST))
4295 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4296 type, arg1, arg2)));
4297 break;
4298
4299 case LE_EXPR:
4300 /* If C1 is C2 - 1, this is min(A, C2). */
4301 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4302 OEP_ONLY_CONST)
4303 && operand_equal_p (arg01,
4304 const_binop (MINUS_EXPR, arg2,
4305 integer_one_node, 0),
4306 OEP_ONLY_CONST))
4307 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4308 type, arg1, arg2)));
4309 break;
4310
4311 case GT_EXPR:
4312 /* If C1 is C2 - 1, this is max(A, C2). */
4313 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4314 OEP_ONLY_CONST)
4315 && operand_equal_p (arg01,
4316 const_binop (MINUS_EXPR, arg2,
4317 integer_one_node, 0),
4318 OEP_ONLY_CONST))
4319 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4320 type, arg1, arg2)));
4321 break;
4322
4323 case GE_EXPR:
4324 /* If C1 is C2 + 1, this is max(A, C2). */
4325 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4326 OEP_ONLY_CONST)
4327 && operand_equal_p (arg01,
4328 const_binop (PLUS_EXPR, arg2,
4329 integer_one_node, 0),
4330 OEP_ONLY_CONST))
4331 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4332 type, arg1, arg2)));
4333 break;
4334 case NE_EXPR:
4335 break;
4336 default:
4337 abort ();
4338 }
4339
4340 return NULL_TREE;
4341 }
4342
4343
4344 \f
4345 #ifndef RANGE_TEST_NON_SHORT_CIRCUIT
4346 #define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4347 #endif
4348
4349 /* EXP is some logical combination of boolean tests. See if we can
4350 merge it into some range test. Return the new tree if so. */
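/* For example, ch >= '0' && ch <= '9' makes the ranges + ['0', -]
   and + [-, '9']; these merge to + ['0', '9'], for which
   build_range_check emits a single unsigned test of the form
   (unsigned) (ch - '0') <= 9. */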
4351
4352 static tree
4353 fold_range_test (tree exp)
4354 {
4355 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
4356 || TREE_CODE (exp) == TRUTH_OR_EXPR);
4357 int in0_p, in1_p, in_p;
4358 tree low0, low1, low, high0, high1, high;
4359 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
4360 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
4361 tree tem;
4362
4363 /* If this is an OR operation, invert both sides; we will invert
4364 again at the end. */
4365 if (or_op)
4366 in0_p = ! in0_p, in1_p = ! in1_p;
4367
4368 /* If both expressions are the same, if we can merge the ranges, and we
4369 can build the range test, return it or it inverted. If one of the
4370 ranges is always true or always false, consider it to be the same
4371 expression as the other. */
4372 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4373 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4374 in1_p, low1, high1)
4375 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
4376 lhs != 0 ? lhs
4377 : rhs != 0 ? rhs : integer_zero_node,
4378 in_p, low, high))))
4379 return or_op ? invert_truthvalue (tem) : tem;
4380
4381 /* On machines where the branch cost is expensive, if this is a
4382 short-circuited branch and the underlying object on both sides
4383 is the same, make a non-short-circuit operation. */
4384 else if (RANGE_TEST_NON_SHORT_CIRCUIT
4385 && lhs != 0 && rhs != 0
4386 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4387 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
4388 && operand_equal_p (lhs, rhs, 0))
4389 {
4390 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4391 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4392 which cases we can't do this. */
4393 if (simple_operand_p (lhs))
4394 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4395 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4396 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
4397 TREE_OPERAND (exp, 1));
4398
4399 else if (lang_hooks.decls.global_bindings_p () == 0
4400 && ! CONTAINS_PLACEHOLDER_P (lhs))
4401 {
4402 tree common = save_expr (lhs);
4403
4404 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
4405 or_op ? ! in0_p : in0_p,
4406 low0, high0))
4407 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
4408 or_op ? ! in1_p : in1_p,
4409 low1, high1))))
4410 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4411 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4412 TREE_TYPE (exp), lhs, rhs);
4413 }
4414 }
4415
4416 return 0;
4417 }
4418 \f
4419 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a
4420 P-bit value. Arrange things so the extra bits will be set to zero if
4421 and only if C is sign-extended to its full width. If MASK is nonzero,
4422 it is an INTEGER_CST that should be AND'ed with the extra bits. */
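/* Worked instance of the shifts below: with P == 8 in a 32-bit mode,
   the sign-extended constant 0xffffff80 is mapped to 0x00000080
   (extra bits cleared), while the zero-extended constant 0x00000080
   is mapped to 0xffffff80 (extra bits set). */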
4423
4424 static tree
4425 unextend (tree c, int p, int unsignedp, tree mask)
4426 {
4427 tree type = TREE_TYPE (c);
4428 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4429 tree temp;
4430
4431 if (p == modesize || unsignedp)
4432 return c;
4433
4434 /* We work by getting just the sign bit into the low-order bit, then
4435 into the high-order bit, then sign-extend. We then XOR that value
4436 with C. */
4437 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4438 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4439
4440 /* We must use a signed type in order to get an arithmetic right shift.
4441 However, we must also avoid introducing accidental overflows, so that
4442 a subsequent call to integer_zerop will work. Hence we must
4443 do the type conversion here. At this point, the constant is either
4444 zero or one, and the conversion to a signed type can never overflow.
4445 We could get an overflow if this conversion is done anywhere else. */
4446 if (TYPE_UNSIGNED (type))
4447 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4448
4449 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4450 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4451 if (mask != 0)
4452 temp = const_binop (BIT_AND_EXPR, temp,
4453 fold_convert (TREE_TYPE (c), mask), 0);
4454 /* If necessary, convert the type back to match the type of C. */
4455 if (TYPE_UNSIGNED (type))
4456 temp = fold_convert (type, temp);
4457
4458 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4459 }
4460 \f
4461 /* Find ways of folding logical expressions of LHS and RHS:
4462 Try to merge two comparisons to the same innermost item.
4463 Look for range tests like "ch >= '0' && ch <= '9'".
4464 Look for combinations of simple terms on machines with expensive branches
4465 and evaluate the RHS unconditionally.
4466
4467 For example, if we have p->a == 2 && p->b == 4 and we can make an
4468 object large enough to span both A and B, we can do this with a comparison
4469 against the object ANDed with the a mask.
4470
4471 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4472 operations to do this with one comparison.
4473
4474 We check for both normal comparisons and the BIT_AND_EXPRs made by this
4475 function and the one above.
4476
4477 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4478 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4479
4480 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4481 two operands.
4482
4483 We return the simplified tree or 0 if no optimization is possible. */
4484
4485 static tree
4486 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4487 {
4488 /* If this is the "or" of two comparisons, we can do something if
4489 the comparisons are NE_EXPR. If this is the "and", we can do something
4490 if the comparisons are EQ_EXPR. I.e.,
4491 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4492
4493 WANTED_CODE is this operation code. For single bit fields, we can
4494 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4495 comparison for one-bit fields. */
4496
4497 enum tree_code wanted_code;
4498 enum tree_code lcode, rcode;
4499 tree ll_arg, lr_arg, rl_arg, rr_arg;
4500 tree ll_inner, lr_inner, rl_inner, rr_inner;
4501 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4502 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4503 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4504 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4505 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4506 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4507 enum machine_mode lnmode, rnmode;
4508 tree ll_mask, lr_mask, rl_mask, rr_mask;
4509 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4510 tree l_const, r_const;
4511 tree lntype, rntype, result;
4512 int first_bit, end_bit;
4513 int volatilep;
4514
4515 /* Start by getting the comparison codes. Fail if anything is volatile.
4516 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4517 it were surrounded with a NE_EXPR. */
4518
4519 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4520 return 0;
4521
4522 lcode = TREE_CODE (lhs);
4523 rcode = TREE_CODE (rhs);
4524
4525 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4526 {
4527 lhs = build2 (NE_EXPR, truth_type, lhs, integer_zero_node);
4528 lcode = NE_EXPR;
4529 }
4530
4531 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4532 {
4533 rhs = build2 (NE_EXPR, truth_type, rhs, integer_zero_node);
4534 rcode = NE_EXPR;
4535 }
4536
4537 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
4538 return 0;
4539
4540 ll_arg = TREE_OPERAND (lhs, 0);
4541 lr_arg = TREE_OPERAND (lhs, 1);
4542 rl_arg = TREE_OPERAND (rhs, 0);
4543 rr_arg = TREE_OPERAND (rhs, 1);
4544
4545 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4546 if (simple_operand_p (ll_arg)
4547 && simple_operand_p (lr_arg))
4548 {
4549 tree result;
4550 if (operand_equal_p (ll_arg, rl_arg, 0)
4551 && operand_equal_p (lr_arg, rr_arg, 0))
4552 {
4553 result = combine_comparisons (code, lcode, rcode,
4554 truth_type, ll_arg, lr_arg);
4555 if (result)
4556 return result;
4557 }
4558 else if (operand_equal_p (ll_arg, rr_arg, 0)
4559 && operand_equal_p (lr_arg, rl_arg, 0))
4560 {
4561 result = combine_comparisons (code, lcode,
4562 swap_tree_comparison (rcode),
4563 truth_type, ll_arg, lr_arg);
4564 if (result)
4565 return result;
4566 }
4567 }
4568
4569 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4570 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4571
4572 /* If the RHS can be evaluated unconditionally and its operands are
4573 simple, it wins to evaluate the RHS unconditionally on machines
4574 with expensive branches. In this case, this isn't a comparison
4575 that can be merged. Avoid doing this if the RHS is a floating-point
4576 comparison since those can trap. */
4577
4578 if (BRANCH_COST >= 2
4579 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4580 && simple_operand_p (rl_arg)
4581 && simple_operand_p (rr_arg))
4582 {
4583 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4584 if (code == TRUTH_OR_EXPR
4585 && lcode == NE_EXPR && integer_zerop (lr_arg)
4586 && rcode == NE_EXPR && integer_zerop (rr_arg)
4587 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4588 return build2 (NE_EXPR, truth_type,
4589 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4590 ll_arg, rl_arg),
4591 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4592
4593 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4594 if (code == TRUTH_AND_EXPR
4595 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4596 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4597 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4598 return build2 (EQ_EXPR, truth_type,
4599 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4600 ll_arg, rl_arg),
4601 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4602
4603 return build2 (code, truth_type, lhs, rhs);
4604 }
4605
4606 /* See if the comparisons can be merged. Then get all the parameters for
4607 each side. */
4608
4609 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4610 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4611 return 0;
4612
4613 volatilep = 0;
4614 ll_inner = decode_field_reference (ll_arg,
4615 &ll_bitsize, &ll_bitpos, &ll_mode,
4616 &ll_unsignedp, &volatilep, &ll_mask,
4617 &ll_and_mask);
4618 lr_inner = decode_field_reference (lr_arg,
4619 &lr_bitsize, &lr_bitpos, &lr_mode,
4620 &lr_unsignedp, &volatilep, &lr_mask,
4621 &lr_and_mask);
4622 rl_inner = decode_field_reference (rl_arg,
4623 &rl_bitsize, &rl_bitpos, &rl_mode,
4624 &rl_unsignedp, &volatilep, &rl_mask,
4625 &rl_and_mask);
4626 rr_inner = decode_field_reference (rr_arg,
4627 &rr_bitsize, &rr_bitpos, &rr_mode,
4628 &rr_unsignedp, &volatilep, &rr_mask,
4629 &rr_and_mask);
4630
4631 /* The inner operation on the lhs of each comparison must be the
4632 same if we are to be able to do anything. Then see if we have
4633 constants. If not, the same must be true for
4634 the rhs's. */
4635 if (volatilep || ll_inner == 0 || rl_inner == 0
4636 || ! operand_equal_p (ll_inner, rl_inner, 0))
4637 return 0;
4638
4639 if (TREE_CODE (lr_arg) == INTEGER_CST
4640 && TREE_CODE (rr_arg) == INTEGER_CST)
4641 l_const = lr_arg, r_const = rr_arg;
4642 else if (lr_inner == 0 || rr_inner == 0
4643 || ! operand_equal_p (lr_inner, rr_inner, 0))
4644 return 0;
4645 else
4646 l_const = r_const = 0;
4647
4648 /* If either comparison code is not correct for our logical operation,
4649 fail. However, we can convert a one-bit comparison against zero into
4650 the opposite comparison against that bit being set in the field. */
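/* E.g., for a one-bit field B, the test B == 0 inside an `or' of
   not-equal tests is recast as the opposite comparison B != 1,
   against the bit being set. */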
4651
4652 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4653 if (lcode != wanted_code)
4654 {
4655 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4656 {
4657 /* Make the left operand unsigned, since we are only interested
4658 in the value of one bit. Otherwise we are doing the wrong
4659 thing below. */
4660 ll_unsignedp = 1;
4661 l_const = ll_mask;
4662 }
4663 else
4664 return 0;
4665 }
4666
4667 /* This is analogous to the code for l_const above. */
4668 if (rcode != wanted_code)
4669 {
4670 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4671 {
4672 rl_unsignedp = 1;
4673 r_const = rl_mask;
4674 }
4675 else
4676 return 0;
4677 }
4678
4679 /* After this point all optimizations will generate bit-field
4680 references, which we might not want. */
4681 if (! lang_hooks.can_use_bit_fields_p ())
4682 return 0;
4683
4684 /* See if we can find a mode that contains both fields being compared on
4685 the left. If we can't, fail. Otherwise, update all constants and masks
4686 to be relative to a field of that size. */
4687 first_bit = MIN (ll_bitpos, rl_bitpos);
4688 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4689 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4690 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4691 volatilep);
4692 if (lnmode == VOIDmode)
4693 return 0;
4694
4695 lnbitsize = GET_MODE_BITSIZE (lnmode);
4696 lnbitpos = first_bit & ~ (lnbitsize - 1);
4697 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4698 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4699
4700 if (BYTES_BIG_ENDIAN)
4701 {
4702 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4703 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4704 }
4705
4706 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4707 size_int (xll_bitpos), 0);
4708 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4709 size_int (xrl_bitpos), 0);
4710
4711 if (l_const)
4712 {
4713 l_const = fold_convert (lntype, l_const);
4714 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4715 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4716 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4717 fold (build1 (BIT_NOT_EXPR,
4718 lntype, ll_mask)),
4719 0)))
4720 {
4721 warning ("comparison is always %d", wanted_code == NE_EXPR);
4722
4723 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4724 }
4725 }
4726 if (r_const)
4727 {
4728 r_const = fold_convert (lntype, r_const);
4729 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4730 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4731 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4732 fold (build1 (BIT_NOT_EXPR,
4733 lntype, rl_mask)),
4734 0)))
4735 {
4736 warning ("comparison is always %d", wanted_code == NE_EXPR);
4737
4738 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4739 }
4740 }
4741
4742 /* If the right sides are not constant, do the same for them. Also,
4743 disallow this optimization if a size or signedness mismatch occurs
4744 between the left and right sides. */
4745 if (l_const == 0)
4746 {
4747 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4748 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4749 /* Make sure the two fields on the right
4750 correspond to the left without being swapped. */
4751 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4752 return 0;
4753
4754 first_bit = MIN (lr_bitpos, rr_bitpos);
4755 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4756 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4757 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4758 volatilep);
4759 if (rnmode == VOIDmode)
4760 return 0;
4761
4762 rnbitsize = GET_MODE_BITSIZE (rnmode);
4763 rnbitpos = first_bit & ~ (rnbitsize - 1);
4764 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4765 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4766
4767 if (BYTES_BIG_ENDIAN)
4768 {
4769 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4770 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4771 }
4772
4773 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4774 size_int (xlr_bitpos), 0);
4775 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4776 size_int (xrr_bitpos), 0);
4777
4778 /* Make a mask that corresponds to both fields being compared.
4779 Do this for both items being compared. If the operands are the
4780 same size and the bits being compared are in the same position
4781 then we can do this by masking both and comparing the masked
4782 results. */
4783 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4784 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4785 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4786 {
4787 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4788 ll_unsignedp || rl_unsignedp);
4789 if (! all_ones_mask_p (ll_mask, lnbitsize))
4790 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
4791
4792 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4793 lr_unsignedp || rr_unsignedp);
4794 if (! all_ones_mask_p (lr_mask, rnbitsize))
4795 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
4796
4797 return build2 (wanted_code, truth_type, lhs, rhs);
4798 }
4799
4800 /* There is still another way we can do something: If both pairs of
4801 fields being compared are adjacent, we may be able to make a wider
4802 field containing them both.
4803
4804 Note that we still must mask the lhs/rhs expressions. Furthermore,
4805 the mask must be shifted to account for the shift done by
4806 make_bit_field_ref. */
4807 if ((ll_bitsize + ll_bitpos == rl_bitpos
4808 && lr_bitsize + lr_bitpos == rr_bitpos)
4809 || (ll_bitpos == rl_bitpos + rl_bitsize
4810 && lr_bitpos == rr_bitpos + rr_bitsize))
4811 {
4812 tree type;
4813
4814 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4815 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4816 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4817 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4818
4819 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4820 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4821 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4822 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4823
4824 /* Convert to the smaller type before masking out unwanted bits. */
4825 type = lntype;
4826 if (lntype != rntype)
4827 {
4828 if (lnbitsize > rnbitsize)
4829 {
4830 lhs = fold_convert (rntype, lhs);
4831 ll_mask = fold_convert (rntype, ll_mask);
4832 type = rntype;
4833 }
4834 else if (lnbitsize < rnbitsize)
4835 {
4836 rhs = fold_convert (lntype, rhs);
4837 lr_mask = fold_convert (lntype, lr_mask);
4838 type = lntype;
4839 }
4840 }
4841
4842 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4843 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
4844
4845 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4846 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
4847
4848 return build2 (wanted_code, truth_type, lhs, rhs);
4849 }
4850
4851 return 0;
4852 }
4853
4854 /* Handle the case of comparisons with constants. If there is something in
4855 common between the masks, those bits of the constants must be the same.
4856 If not, the overall condition has a constant result. Test for this to
4857 avoid generating incorrect code below. */
4858 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4859 if (! integer_zerop (result)
4860 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4861 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4862 {
4863 if (wanted_code == NE_EXPR)
4864 {
4865 warning ("`or' of unmatched not-equal tests is always 1");
4866 return constant_boolean_node (true, truth_type);
4867 }
4868 else
4869 {
4870 warning ("`and' of mutually exclusive equal-tests is always 0");
4871 return constant_boolean_node (false, truth_type);
4872 }
4873 }
4874
4875 /* Construct the expression we will return. First get the component
4876 reference we will make. Unless the mask is all ones the width of
4877 that field, perform the mask operation. Then compare with the
4878 merged constant. */
4879 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4880 ll_unsignedp || rl_unsignedp);
4881
4882 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4883 if (! all_ones_mask_p (ll_mask, lnbitsize))
4884 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
4885
4886 return build2 (wanted_code, truth_type, result,
4887 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4888 }
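/* A sketch of the result (the exact layout depends on endianness and
   alignment): given struct s { unsigned a : 8; unsigned b : 8; } *p,
   the test p->a == 2 && p->b == 3 can become one 16-bit load compared
   against the merged constant, (3 << 8) | 2 on a little-endian
   target, with no mask needed since the fields fill the word. */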
4889 \f
4890 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4891 constant. */
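/* For instance, MAX (X, 4) >= 5 is split by the GE_EXPR case below
   into MAX (X, 4) == 5 || MAX (X, 4) > 5, which the EQ_EXPR and
   GT_EXPR cases then reduce to X == 5 || X > 5. */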
4892
4893 static tree
4894 optimize_minmax_comparison (tree t)
4895 {
4896 tree type = TREE_TYPE (t);
4897 tree arg0 = TREE_OPERAND (t, 0);
4898 enum tree_code op_code;
4899 tree comp_const = TREE_OPERAND (t, 1);
4900 tree minmax_const;
4901 int consts_equal, consts_lt;
4902 tree inner;
4903
4904 STRIP_SIGN_NOPS (arg0);
4905
4906 op_code = TREE_CODE (arg0);
4907 minmax_const = TREE_OPERAND (arg0, 1);
4908 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4909 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4910 inner = TREE_OPERAND (arg0, 0);
4911
4912 /* If something does not permit us to optimize, return the original tree. */
4913 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4914 || TREE_CODE (comp_const) != INTEGER_CST
4915 || TREE_CONSTANT_OVERFLOW (comp_const)
4916 || TREE_CODE (minmax_const) != INTEGER_CST
4917 || TREE_CONSTANT_OVERFLOW (minmax_const))
4918 return t;
4919
4920 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4921 and GT_EXPR, doing the rest with recursive calls using logical
4922 simplifications. */
4923 switch (TREE_CODE (t))
4924 {
4925 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4926 return
4927 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4928
4929 case GE_EXPR:
4930 return
4931 fold (build2 (TRUTH_ORIF_EXPR, type,
4932 optimize_minmax_comparison
4933 (build2 (EQ_EXPR, type, arg0, comp_const)),
4934 optimize_minmax_comparison
4935 (build2 (GT_EXPR, type, arg0, comp_const))));
4936
4937 case EQ_EXPR:
4938 if (op_code == MAX_EXPR && consts_equal)
4939 /* MAX (X, 0) == 0 -> X <= 0 */
4940 return fold (build2 (LE_EXPR, type, inner, comp_const));
4941
4942 else if (op_code == MAX_EXPR && consts_lt)
4943 /* MAX (X, 0) == 5 -> X == 5 */
4944 return fold (build2 (EQ_EXPR, type, inner, comp_const));
4945
4946 else if (op_code == MAX_EXPR)
4947 /* MAX (X, 0) == -1 -> false */
4948 return omit_one_operand (type, integer_zero_node, inner);
4949
4950 else if (consts_equal)
4951 /* MIN (X, 0) == 0 -> X >= 0 */
4952 return fold (build2 (GE_EXPR, type, inner, comp_const));
4953
4954 else if (consts_lt)
4955 /* MIN (X, 0) == 5 -> false */
4956 return omit_one_operand (type, integer_zero_node, inner);
4957
4958 else
4959 /* MIN (X, 0) == -1 -> X == -1 */
4960 return fold (build2 (EQ_EXPR, type, inner, comp_const));
4961
4962 case GT_EXPR:
4963 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4964 /* MAX (X, 0) > 0 -> X > 0
4965 MAX (X, 0) > 5 -> X > 5 */
4966 return fold (build2 (GT_EXPR, type, inner, comp_const));
4967
4968 else if (op_code == MAX_EXPR)
4969 /* MAX (X, 0) > -1 -> true */
4970 return omit_one_operand (type, integer_one_node, inner);
4971
4972 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4973 /* MIN (X, 0) > 0 -> false
4974 MIN (X, 0) > 5 -> false */
4975 return omit_one_operand (type, integer_zero_node, inner);
4976
4977 else
4978 /* MIN (X, 0) > -1 -> X > -1 */
4979 return fold (build2 (GT_EXPR, type, inner, comp_const));
4980
4981 default:
4982 return t;
4983 }
4984 }
4985 \f
4986 /* T is an integer expression that is being multiplied or divided by, or
4987 taken modulo, a constant C (CODE says which operation and what kind
4988 of divide or modulus). See if we can eliminate that operation by folding it with
4989 other operations already in T. WIDE_TYPE, if non-null, is a type that
4990 should be used for the computation if wider than our type.
4991
4992 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4993 (X * 2) + (Y * 4). We must, however, be assured that either the original
4994 expression would not overflow or that overflow is undefined for the type
4995 in the language in question.
4996
4997 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4998 the machine has a multiply-accumulate insn or that this is part of an
4999 addressing calculation.
5000
5001 If we return a non-null expression, it is an equivalent form of the
5002 original computation, but need not be in the original type. */
5003
5004 static tree
5005 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5006 {
5007 /* To avoid exponential search depth, refuse to allow recursion past
5008 three levels. Beyond that (1) it's highly unlikely that we'll find
5009 something interesting and (2) we've probably processed it before
5010 when we built the inner expression. */
5011
5012 static int depth;
5013 tree ret;
5014
5015 if (depth > 3)
5016 return NULL;
5017
5018 depth++;
5019 ret = extract_muldiv_1 (t, c, code, wide_type);
5020 depth--;
5021
5022 return ret;
5023 }
5024
5025 static tree
5026 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5027 {
5028 tree type = TREE_TYPE (t);
5029 enum tree_code tcode = TREE_CODE (t);
5030 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5031 > GET_MODE_SIZE (TYPE_MODE (type)))
5032 ? wide_type : type);
5033 tree t1, t2;
5034 int same_p = tcode == code;
5035 tree op0 = NULL_TREE, op1 = NULL_TREE;
5036
5037 /* Don't deal with constants of zero here; they confuse the code below. */
5038 if (integer_zerop (c))
5039 return NULL_TREE;
5040
5041 if (TREE_CODE_CLASS (tcode) == '1')
5042 op0 = TREE_OPERAND (t, 0);
5043
5044 if (TREE_CODE_CLASS (tcode) == '2')
5045 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5046
5047 /* Note that we need not handle conditional operations here since fold
5048 already handles those cases. So just do arithmetic here. */
5049 switch (tcode)
5050 {
5051 case INTEGER_CST:
5052 /* For a constant, we can always simplify if we are a multiply
5053 or (for divide and modulus) if it is a multiple of our constant. */
5054 if (code == MULT_EXPR
5055 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5056 return const_binop (code, fold_convert (ctype, t),
5057 fold_convert (ctype, c), 0);
5058 break;
5059
5060 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5061 /* If op0 is an expression ... */
5062 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
5063 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
5064 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
5065 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
5066 /* ... and is unsigned, and its type is smaller than ctype,
5067 then we cannot pass through as widening. */
5068 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5069 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5070 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5071 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5072 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5073 /* ... or its type is larger than ctype,
5074 then we cannot pass through this truncation. */
5075 || (GET_MODE_SIZE (TYPE_MODE (ctype))
5076 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5077 /* ... or signedness changes for division or modulus,
5078 then we cannot pass through this conversion. */
5079 || (code != MULT_EXPR
5080 && (TYPE_UNSIGNED (ctype)
5081 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5082 break;
5083
5084 /* Pass the constant down and see if we can make a simplification. If
5085 we can, replace this expression with the inner simplification for
5086 possible later conversion to our type or some other type. */
5087 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5088 && TREE_CODE (t2) == INTEGER_CST
5089 && ! TREE_CONSTANT_OVERFLOW (t2)
5090 && (0 != (t1 = extract_muldiv (op0, t2, code,
5091 code == MULT_EXPR
5092 ? ctype : NULL_TREE))))
5093 return t1;
5094 break;
5095
5096 case NEGATE_EXPR: case ABS_EXPR:
5097 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5098 return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
5099 break;
5100
5101 case MIN_EXPR: case MAX_EXPR:
5102 /* If widening the type changes the signedness, then we can't perform
5103 this optimization as that changes the result. */
5104 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5105 break;
5106
5107 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5108 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5109 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5110 {
5111 if (tree_int_cst_sgn (c) < 0)
5112 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5113
5114 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5115 fold_convert (ctype, t2)));
5116 }
5117 break;
5118
5119 case LSHIFT_EXPR: case RSHIFT_EXPR:
5120 /* If the second operand is constant, this is a multiplication
5121 or floor division by a power of two, so we can treat it that
5122 way unless the multiplier or divisor overflows. */
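/* E.g., (X << 3) / 4 is rewritten as (X * 8) / 4 and re-submitted to
   extract_muldiv, which reduces it to X * 2 when the type's overflow
   rules permit (see the MULT_EXPR case below). */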
5123 if (TREE_CODE (op1) == INTEGER_CST
5124 /* const_binop may not detect overflow correctly,
5125 so check for it explicitly here. */
5126 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5127 && TREE_INT_CST_HIGH (op1) == 0
5128 && 0 != (t1 = fold_convert (ctype,
5129 const_binop (LSHIFT_EXPR,
5130 size_one_node,
5131 op1, 0)))
5132 && ! TREE_OVERFLOW (t1))
5133 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5134 ? MULT_EXPR : FLOOR_DIV_EXPR,
5135 ctype, fold_convert (ctype, op0), t1),
5136 c, code, wide_type);
5137 break;
5138
5139 case PLUS_EXPR: case MINUS_EXPR:
5140 /* See if we can eliminate the operation on both sides. If we can, we
5141 can return a new PLUS or MINUS. If we can't, the only remaining
5142 cases where we can do anything are if the second operand is a
5143 constant. */
5144 t1 = extract_muldiv (op0, c, code, wide_type);
5145 t2 = extract_muldiv (op1, c, code, wide_type);
5146 if (t1 != 0 && t2 != 0
5147 && (code == MULT_EXPR
5148 /* If not multiplication, we can only do this if both operands
5149 are divisible by c. */
5150 || (multiple_of_p (ctype, op0, c)
5151 && multiple_of_p (ctype, op1, c))))
5152 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5153 fold_convert (ctype, t2)));
5154
5155 /* If this was a subtraction, negate OP1 and set it to be an addition.
5156 This simplifies the logic below. */
5157 if (tcode == MINUS_EXPR)
5158 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5159
5160 if (TREE_CODE (op1) != INTEGER_CST)
5161 break;
5162
5163 /* If either OP1 or C is negative, this optimization is not safe for
5164 some of the division and remainder types, while for others we need
5165 to change the code. */
5166 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5167 {
5168 if (code == CEIL_DIV_EXPR)
5169 code = FLOOR_DIV_EXPR;
5170 else if (code == FLOOR_DIV_EXPR)
5171 code = CEIL_DIV_EXPR;
5172 else if (code != MULT_EXPR
5173 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5174 break;
5175 }
5176
5177 /* If it's a multiply or a division/modulus operation of a multiple
5178 of our constant, do the operation and verify it doesn't overflow. */
5179 if (code == MULT_EXPR
5180 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5181 {
5182 op1 = const_binop (code, fold_convert (ctype, op1),
5183 fold_convert (ctype, c), 0);
5184 /* We allow the constant to overflow with wrapping semantics. */
5185 if (op1 == 0
5186 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5187 break;
5188 }
5189 else
5190 break;
5191
5192 /* If we have an unsigned type that is not a sizetype, we cannot widen
5193 the operation since it will change the result if the original
5194 computation overflowed. */
5195 if (TYPE_UNSIGNED (ctype)
5196 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5197 && ctype != type)
5198 break;
5199
5200 /* If we were able to eliminate our operation from the first side,
5201 apply our operation to the second side and reform the PLUS. */
5202 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5203 return fold (build2 (tcode, ctype, fold_convert (ctype, t1), op1));
5204
5205 /* The last case is if we are a multiply. In that case, we can
5206 apply the distributive law to commute the multiply and addition
5207 if the multiplication of the constants doesn't overflow. */
5208 if (code == MULT_EXPR)
5209 return fold (build2 (tcode, ctype,
5210 fold (build2 (code, ctype,
5211 fold_convert (ctype, op0),
5212 fold_convert (ctype, c))),
5213 op1));
5214
5215 break;
5216
5217 case MULT_EXPR:
5218 /* We have a special case here if we are doing something like
5219 (C * 8) % 4 since we know that's zero. */
5220 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5221 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5222 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5223 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5224 return omit_one_operand (type, integer_zero_node, op0);
5225
5226 /* ... fall through ... */
5227
5228 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5229 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5230 /* If we can extract our operation from the LHS, do so and return a
5231 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5232 do something only if the second operand is a constant. */
5233 if (same_p
5234 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5235 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5236 fold_convert (ctype, op1)));
5237 else if (tcode == MULT_EXPR && code == MULT_EXPR
5238 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5239 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5240 fold_convert (ctype, t1)));
5241 else if (TREE_CODE (op1) != INTEGER_CST)
5242 return 0;
5243
5244 /* If these are the same operation types, we can associate them
5245 assuming no overflow. */
5246 if (tcode == code
5247 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5248 fold_convert (ctype, c), 0))
5249 && ! TREE_OVERFLOW (t1))
5250 return fold (build2 (tcode, ctype, fold_convert (ctype, op0), t1));
5251
5252 /* If these operations "cancel" each other, we have the main
5253 optimizations of this pass, which occur when either constant is a
5254 multiple of the other, in which case we replace this with an
5255 operation of either CODE or TCODE.
5256
5257 If we have an unsigned type that is not a sizetype, we cannot do
5258 this since it will change the result if the original computation
5259 overflowed. */
5260 if ((! TYPE_UNSIGNED (ctype)
5261 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5262 && ! flag_wrapv
5263 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5264 || (tcode == MULT_EXPR
5265 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5266 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5267 {
5268 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5269 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5270 fold_convert (ctype,
5271 const_binop (TRUNC_DIV_EXPR,
5272 op1, c, 0))));
5273 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5274 return fold (build2 (code, ctype, fold_convert (ctype, op0),
5275 fold_convert (ctype,
5276 const_binop (TRUNC_DIV_EXPR,
5277 c, op1, 0))));
5278 }
5279 break;
5280
5281 default:
5282 break;
5283 }
5284
5285 return 0;
5286 }
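/* A standalone sketch, not part of GCC, of the integer identities the
   extract_muldiv cases above rely on; hypothetical demo code, guarded
   with #if 0 so it cannot affect the build.  */
#if 0
#include <assert.h>
int
main (void)
{
  int x = 7;
  assert ((x + 4) * 8 == x * 8 + 32);	/* Distribute MULT over PLUS.  */
  assert ((x * 8) / 4 == x * 2);	/* MULT cancels against DIV.  */
  assert ((x * 8) % 4 == 0);		/* (C * 8) % 4 is known to be zero.  */
  assert ((x << 3) / 4 == x * 2);	/* LSHIFT is treated as MULT.  */
  return 0;
}
#endif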
5287 \f
5288 /* Return a node which has the indicated constant VALUE (either 0 or
5289 1), and is of the indicated TYPE. */
5290
5291 static tree
5292 constant_boolean_node (int value, tree type)
5293 {
5294 if (type == integer_type_node)
5295 return value ? integer_one_node : integer_zero_node;
5296 else if (type == boolean_type_node)
5297 return value ? boolean_true_node : boolean_false_node;
5298 else if (TREE_CODE (type) == BOOLEAN_TYPE)
5299 return lang_hooks.truthvalue_conversion (value ? integer_one_node
5300 : integer_zero_node);
5301 else
5302 {
5303 tree t = build_int_2 (value, 0);
5304
5305 TREE_TYPE (t) = type;
5306 return t;
5307 }
5308 }
5309
5310 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5311 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5312 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5313 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5314 COND is the first argument to CODE; otherwise (as in the example
5315 given here), it is the second argument. TYPE is the type of the
5316 original expression. Return NULL_TREE if no simplification is
5317 possible. */
5318
5319 static tree
5320 fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
5321 tree cond, tree arg, int cond_first_p)
5322 {
5323 tree test, true_value, false_value;
5324 tree lhs = NULL_TREE;
5325 tree rhs = NULL_TREE;
5326
5327 /* This transformation is only worthwhile if we don't have to wrap
5328 arg in a SAVE_EXPR, and the operation can be simplified on at least
5329 one of the branches once it's pushed inside the COND_EXPR. */
5330 if (!TREE_CONSTANT (arg))
5331 return NULL_TREE;
5332
5333 if (TREE_CODE (cond) == COND_EXPR)
5334 {
5335 test = TREE_OPERAND (cond, 0);
5336 true_value = TREE_OPERAND (cond, 1);
5337 false_value = TREE_OPERAND (cond, 2);
5338 /* If this operand throws an exception, then it does not make
5339 sense to try to perform a logical or arithmetic operation
5340 involving it. */
5341 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5342 lhs = true_value;
5343 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5344 rhs = false_value;
5345 }
5346 else
5347 {
5348 tree testtype = TREE_TYPE (cond);
5349 test = cond;
5350 true_value = constant_boolean_node (true, testtype);
5351 false_value = constant_boolean_node (false, testtype);
5352 }
5353
5354 if (lhs == 0)
5355 lhs = fold (cond_first_p ? build2 (code, type, true_value, arg)
5356 : build2 (code, type, arg, true_value));
5357 if (rhs == 0)
5358 rhs = fold (cond_first_p ? build2 (code, type, false_value, arg)
5359 : build2 (code, type, arg, false_value));
5360
5361 test = fold (build3 (COND_EXPR, type, test, lhs, rhs));
5362 return fold_convert (type, test);
5363 }
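/* A minimal sketch, not part of GCC, of the rewrite performed above;
   hypothetical demo code, guarded out of the build.  */
#if 0
#include <assert.h>
int
main (void)
{
  int a = 10, b = 1, x = 2, y = 3;
  /* `a + (b ? x : y)' equals `b ? (a + x) : (a + y)'; the right-hand
     form lets each arm fold further when A is constant.  */
  assert (a + (b ? x : y) == (b ? (a + x) : (a + y)));
  /* Likewise `a + (x < y)' equals `(x < y) ? (a + 1) : (a + 0)'.  */
  assert (a + (x < y) == ((x < y) ? (a + 1) : (a + 0)));
  return 0;
}
#endif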
5364
5365 \f
5366 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5367
5368 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5369 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5370 ADDEND is the same as X.
5371
5372 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5373 and finite. The problematic cases are when X is zero, and its mode
5374 has signed zeros. In the case of rounding towards -infinity,
5375 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5376 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5377
5378 static bool
5379 fold_real_zero_addition_p (tree type, tree addend, int negate)
5380 {
5381 if (!real_zerop (addend))
5382 return false;
5383
5384 /* Don't allow the fold with -fsignaling-nans. */
5385 if (HONOR_SNANS (TYPE_MODE (type)))
5386 return false;
5387
5388 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5389 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5390 return true;
5391
5392 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5393 if (TREE_CODE (addend) == REAL_CST
5394 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5395 negate = !negate;
5396
5397 /* The mode has signed zeros, and we have to honor their sign.
5398 In this situation, there is only one case we can return true for.
5399 X - 0 is the same as X unless rounding towards -infinity is
5400 supported. */
5401 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5402 }
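/* A standalone illustration, not part of GCC, of why signed zeros
   block the fold above: under IEEE 754 round-to-nearest, -0.0 + 0.0
   is +0.0, so X + 0.0 is not an identity, while X - 0.0 is.
   Hypothetical demo, guarded out; assumes the C99 signbit macro.  */
#if 0
#include <assert.h>
#include <math.h>
int
main (void)
{
  double x = -0.0;
  assert (!signbit (x + 0.0));	/* -0.0 + 0.0 == +0.0: the sign is lost.  */
  assert (signbit (x - 0.0));	/* -0.0 - 0.0 == -0.0: X - 0 preserves X.  */
  return 0;
}
#endif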
5403
5404 /* Subroutine of fold() that checks comparisons of built-in math
5405 functions against real constants.
5406
5407 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5408 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5409 is the type of the result and ARG0 and ARG1 are the operands of the
5410 comparison. ARG1 must be a TREE_REAL_CST.
5411
5412 The function returns the constant folded tree if a simplification
5413 can be made, and NULL_TREE otherwise. */
5414
5415 static tree
5416 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5417 tree type, tree arg0, tree arg1)
5418 {
5419 REAL_VALUE_TYPE c;
5420
5421 if (BUILTIN_SQRT_P (fcode))
5422 {
5423 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5424 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5425
5426 c = TREE_REAL_CST (arg1);
5427 if (REAL_VALUE_NEGATIVE (c))
5428 {
5429 /* sqrt(x) < y is always false, if y is negative. */
5430 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5431 return omit_one_operand (type, integer_zero_node, arg);
5432
5433 /* sqrt(x) > y is always true, if y is negative and we
5434 don't care about NaNs, i.e. negative values of x. */
5435 if (code == NE_EXPR || !HONOR_NANS (mode))
5436 return omit_one_operand (type, integer_one_node, arg);
5437
5438 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5439 return fold (build2 (GE_EXPR, type, arg,
5440 build_real (TREE_TYPE (arg), dconst0)));
5441 }
5442 else if (code == GT_EXPR || code == GE_EXPR)
5443 {
5444 REAL_VALUE_TYPE c2;
5445
5446 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5447 real_convert (&c2, mode, &c2);
5448
5449 if (REAL_VALUE_ISINF (c2))
5450 {
5451 /* sqrt(x) > y is x == +Inf, when y is very large. */
5452 if (HONOR_INFINITIES (mode))
5453 return fold (build2 (EQ_EXPR, type, arg,
5454 build_real (TREE_TYPE (arg), c2)));
5455
5456 /* sqrt(x) > y is always false, when y is very large
5457 and we don't care about infinities. */
5458 return omit_one_operand (type, integer_zero_node, arg);
5459 }
5460
5461 /* sqrt(x) > c is the same as x > c*c. */
5462 return fold (build2 (code, type, arg,
5463 build_real (TREE_TYPE (arg), c2)));
5464 }
5465 else if (code == LT_EXPR || code == LE_EXPR)
5466 {
5467 REAL_VALUE_TYPE c2;
5468
5469 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5470 real_convert (&c2, mode, &c2);
5471
5472 if (REAL_VALUE_ISINF (c2))
5473 {
5474 /* sqrt(x) < y is always true, when y is a very large
5475 value and we don't care about NaNs or Infinities. */
5476 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5477 return omit_one_operand (type, integer_one_node, arg);
5478
5479 /* sqrt(x) < y is x != +Inf when y is very large and we
5480 don't care about NaNs. */
5481 if (! HONOR_NANS (mode))
5482 return fold (build2 (NE_EXPR, type, arg,
5483 build_real (TREE_TYPE (arg), c2)));
5484
5485 /* sqrt(x) < y is x >= 0 when y is very large and we
5486 don't care about Infinities. */
5487 if (! HONOR_INFINITIES (mode))
5488 return fold (build2 (GE_EXPR, type, arg,
5489 build_real (TREE_TYPE (arg), dconst0)));
5490
5491 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5492 if (lang_hooks.decls.global_bindings_p () != 0
5493 || CONTAINS_PLACEHOLDER_P (arg))
5494 return NULL_TREE;
5495
5496 arg = save_expr (arg);
5497 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5498 fold (build2 (GE_EXPR, type, arg,
5499 build_real (TREE_TYPE (arg),
5500 dconst0))),
5501 fold (build2 (NE_EXPR, type, arg,
5502 build_real (TREE_TYPE (arg),
5503 c2)))));
5504 }
5505
5506 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5507 if (! HONOR_NANS (mode))
5508 return fold (build2 (code, type, arg,
5509 build_real (TREE_TYPE (arg), c2)));
5510
5511 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5512 if (lang_hooks.decls.global_bindings_p () == 0
5513 && ! CONTAINS_PLACEHOLDER_P (arg))
5514 {
5515 arg = save_expr (arg);
5516 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5517 fold (build2 (GE_EXPR, type, arg,
5518 build_real (TREE_TYPE (arg),
5519 dconst0))),
5520 fold (build2 (code, type, arg,
5521 build_real (TREE_TYPE (arg),
5522 c2)))));
5523 }
5524 }
5525 }
5526
5527 return NULL_TREE;
5528 }
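/* A standalone check, not part of GCC, of the square-root folds above:
   for a finite nonnegative constant c, sqrt(x) > c iff x > c*c, and
   sqrt(x) < c iff x >= 0 && x < c*c.  Hypothetical demo, guarded out.  */
#if 0
#include <assert.h>
#include <math.h>
int
main (void)
{
  static const double xs[] = { 0.0, 3.9, 4.0, 4.1, 100.0 };
  double c = 2.0;
  int i;
  for (i = 0; i < 5; i++)
    {
      assert ((sqrt (xs[i]) > c) == (xs[i] > c * c));
      assert ((sqrt (xs[i]) < c) == (xs[i] >= 0 && xs[i] < c * c));
    }
  return 0;
}
#endif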
5529
5530 /* Subroutine of fold() that optimizes comparisons against Infinities,
5531 either +Inf or -Inf.
5532
5533 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5534 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5535 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5536
5537 The function returns the constant folded tree if a simplification
5538 can be made, and NULL_TREE otherwise. */
5539
5540 static tree
5541 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5542 {
5543 enum machine_mode mode;
5544 REAL_VALUE_TYPE max;
5545 tree temp;
5546 bool neg;
5547
5548 mode = TYPE_MODE (TREE_TYPE (arg0));
5549
5550 /* For negative infinity swap the sense of the comparison. */
5551 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5552 if (neg)
5553 code = swap_tree_comparison (code);
5554
5555 switch (code)
5556 {
5557 case GT_EXPR:
5558 /* x > +Inf is always false, if we ignore sNaNs. */
5559 if (HONOR_SNANS (mode))
5560 return NULL_TREE;
5561 return omit_one_operand (type, integer_zero_node, arg0);
5562
5563 case LE_EXPR:
5564 /* x <= +Inf is always true, if we don't care about NaNs. */
5565 if (! HONOR_NANS (mode))
5566 return omit_one_operand (type, integer_one_node, arg0);
5567
5568 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
5569 if (lang_hooks.decls.global_bindings_p () == 0
5570 && ! CONTAINS_PLACEHOLDER_P (arg0))
5571 {
5572 arg0 = save_expr (arg0);
5573 return fold (build2 (EQ_EXPR, type, arg0, arg0));
5574 }
5575 break;
5576
5577 case EQ_EXPR:
5578 case GE_EXPR:
5579 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5580 real_maxval (&max, neg, mode);
5581 return fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5582 arg0, build_real (TREE_TYPE (arg0), max)));
5583
5584 case LT_EXPR:
5585 /* x < +Inf is always equal to x <= DBL_MAX. */
5586 real_maxval (&max, neg, mode);
5587 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5588 arg0, build_real (TREE_TYPE (arg0), max)));
5589
5590 case NE_EXPR:
5591 /* x != +Inf is always equal to !(x > DBL_MAX). */
5592 real_maxval (&max, neg, mode);
5593 if (! HONOR_NANS (mode))
5594 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5595 arg0, build_real (TREE_TYPE (arg0), max)));
5596
5597 /* The transformation below creates non-gimple code and thus is
5598 not appropriate if we are in gimple form. */
5599 if (in_gimple_form)
5600 return NULL_TREE;
5601
5602 temp = fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5603 arg0, build_real (TREE_TYPE (arg0), max)));
5604 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
5605
5606 default:
5607 break;
5608 }
5609
5610 return NULL_TREE;
5611 }
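/* A standalone illustration, not part of GCC, of the Infinity folds
   above: when NaNs need not be honored, x < +Inf is x <= DBL_MAX and
   x >= +Inf is x > DBL_MAX.  Hypothetical demo, guarded out; assumes
   the C99 INFINITY macro.  */
#if 0
#include <assert.h>
#include <float.h>
#include <math.h>
int
main (void)
{
  static const double xs[] = { -INFINITY, -1.0, DBL_MAX, INFINITY };
  int i;
  for (i = 0; i < 4; i++)
    {
      assert ((xs[i] < INFINITY) == (xs[i] <= DBL_MAX));
      assert ((xs[i] >= INFINITY) == (xs[i] > DBL_MAX));
    }
  return 0;
}
#endif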
5612
5613 /* Subroutine of fold() that optimizes comparisons of a division by
5614 a nonzero integer constant against an integer constant, i.e.
5615 X/C1 op C2.
5616
5617 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5618 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5619 are the operands of the comparison. ARG1 must be an INTEGER_CST.
5620
5621 The function returns the constant folded tree if a simplification
5622 can be made, and NULL_TREE otherwise. */
5623
5624 static tree
5625 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5626 {
5627 tree prod, tmp, hi, lo;
5628 tree arg00 = TREE_OPERAND (arg0, 0);
5629 tree arg01 = TREE_OPERAND (arg0, 1);
5630 unsigned HOST_WIDE_INT lpart;
5631 HOST_WIDE_INT hpart;
5632 int overflow;
5633
5634 /* We have to do this the hard way to detect unsigned overflow.
5635 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5636 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5637 TREE_INT_CST_HIGH (arg01),
5638 TREE_INT_CST_LOW (arg1),
5639 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5640 prod = build_int_2 (lpart, hpart);
5641 TREE_TYPE (prod) = TREE_TYPE (arg00);
5642 TREE_OVERFLOW (prod) = force_fit_type (prod, overflow)
5643 || TREE_INT_CST_HIGH (prod) != hpart
5644 || TREE_INT_CST_LOW (prod) != lpart;
5645 TREE_CONSTANT_OVERFLOW (prod) = TREE_OVERFLOW (prod);
5646
5647 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
5648 {
5649 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5650 lo = prod;
5651
5652 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5653 overflow = add_double (TREE_INT_CST_LOW (prod),
5654 TREE_INT_CST_HIGH (prod),
5655 TREE_INT_CST_LOW (tmp),
5656 TREE_INT_CST_HIGH (tmp),
5657 &lpart, &hpart);
5658 hi = build_int_2 (lpart, hpart);
5659 TREE_TYPE (hi) = TREE_TYPE (arg00);
5660 TREE_OVERFLOW (hi) = force_fit_type (hi, overflow)
5661 || TREE_INT_CST_HIGH (hi) != hpart
5662 || TREE_INT_CST_LOW (hi) != lpart
5663 || TREE_OVERFLOW (prod);
5664 TREE_CONSTANT_OVERFLOW (hi) = TREE_OVERFLOW (hi);
5665 }
5666 else if (tree_int_cst_sgn (arg01) >= 0)
5667 {
5668 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5669 switch (tree_int_cst_sgn (arg1))
5670 {
5671 case -1:
5672 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5673 hi = prod;
5674 break;
5675
5676 case 0:
5677 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
5678 hi = tmp;
5679 break;
5680
5681 case 1:
5682 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5683 lo = prod;
5684 break;
5685
5686 default:
5687 abort ();
5688 }
5689 }
5690 else
5691 {
5692 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
5693 switch (tree_int_cst_sgn (arg1))
5694 {
5695 case -1:
5696 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5697 lo = prod;
5698 break;
5699
5700 case 0:
5701 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
5702 lo = tmp;
5703 break;
5704
5705 case 1:
5706 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5707 hi = prod;
5708 break;
5709
5710 default:
5711 abort ();
5712 }
5713 }
5714
5715 switch (code)
5716 {
5717 case EQ_EXPR:
5718 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5719 return omit_one_operand (type, integer_zero_node, arg00);
5720 if (TREE_OVERFLOW (hi))
5721 return fold (build2 (GE_EXPR, type, arg00, lo));
5722 if (TREE_OVERFLOW (lo))
5723 return fold (build2 (LE_EXPR, type, arg00, hi));
5724 return build_range_check (type, arg00, 1, lo, hi);
5725
5726 case NE_EXPR:
5727 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5728 return omit_one_operand (type, integer_one_node, arg00);
5729 if (TREE_OVERFLOW (hi))
5730 return fold (build2 (LT_EXPR, type, arg00, lo));
5731 if (TREE_OVERFLOW (lo))
5732 return fold (build2 (GT_EXPR, type, arg00, hi));
5733 return build_range_check (type, arg00, 0, lo, hi);
5734
5735 case LT_EXPR:
5736 if (TREE_OVERFLOW (lo))
5737 return omit_one_operand (type, integer_zero_node, arg00);
5738 return fold (build2 (LT_EXPR, type, arg00, lo));
5739
5740 case LE_EXPR:
5741 if (TREE_OVERFLOW (hi))
5742 return omit_one_operand (type, integer_one_node, arg00);
5743 return fold (build2 (LE_EXPR, type, arg00, hi));
5744
5745 case GT_EXPR:
5746 if (TREE_OVERFLOW (hi))
5747 return omit_one_operand (type, integer_zero_node, arg00);
5748 return fold (build2 (GT_EXPR, type, arg00, hi));
5749
5750 case GE_EXPR:
5751 if (TREE_OVERFLOW (lo))
5752 return omit_one_operand (type, integer_one_node, arg00);
5753 return fold (build2 (GE_EXPR, type, arg00, lo));
5754
5755 default:
5756 break;
5757 }
5758
5759 return NULL_TREE;
5760 }
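/* A worked example, not from the GCC sources, of the LO/HI ranges
   computed above: with division truncating towards zero (as in C99),
   x/3 == 2 holds exactly for x in [6, 8] and x/3 == -2 for x in
   [-8, -6].  Hypothetical demo, guarded out.  */
#if 0
#include <assert.h>
int
main (void)
{
  int x;
  for (x = -12; x <= 12; x++)
    {
      assert ((x / 3 == 2) == (x >= 6 && x <= 8));
      assert ((x / 3 == -2) == (x >= -8 && x <= -6));
      assert ((x / 3 < 2) == (x < 6));	/* LT_EXPR tests the LO bound.  */
      assert ((x / 3 > 2) == (x > 8));	/* GT_EXPR tests the HI bound.  */
    }
  return 0;
}
#endif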
5761
5762
5763 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5764 equality/inequality test, then return a simplified form of
5765 the test using shifts and logical operations. Otherwise return
5766 NULL. TYPE is the desired result type. */
5767
5768 tree
5769 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
5770 tree result_type)
5771 {
5772 /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
5773 operand 0. */
5774 if (code == TRUTH_NOT_EXPR)
5775 {
5776 code = TREE_CODE (arg0);
5777 if (code != NE_EXPR && code != EQ_EXPR)
5778 return NULL_TREE;
5779
5780 /* Extract the arguments of the EQ/NE. */
5781 arg1 = TREE_OPERAND (arg0, 1);
5782 arg0 = TREE_OPERAND (arg0, 0);
5783
5784 /* This requires us to invert the code. */
5785 code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
5786 }
5787
5788 /* If this is testing a single bit, we can optimize the test. */
5789 if ((code == NE_EXPR || code == EQ_EXPR)
5790 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5791 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5792 {
5793 tree inner = TREE_OPERAND (arg0, 0);
5794 tree type = TREE_TYPE (arg0);
5795 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5796 enum machine_mode operand_mode = TYPE_MODE (type);
5797 int ops_unsigned;
5798 tree signed_type, unsigned_type, intermediate_type;
5799 tree arg00;
5800
5801 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5802 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
5803 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5804 if (arg00 != NULL_TREE
5805 /* This is only a win if casting to a signed type is cheap,
5806 i.e. when arg00's type is not a partial mode. */
5807 && TYPE_PRECISION (TREE_TYPE (arg00))
5808 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
5809 {
5810 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
5811 return fold (build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
5812 result_type, fold_convert (stype, arg00),
5813 fold_convert (stype, integer_zero_node)));
5814 }
5815
5816 /* Otherwise we have (A & C) != 0 where C is a single bit,
5817 convert that into ((A >> C2) & 1). Where C2 = log2(C).
5818 Similarly for (A & C) == 0. */
5819
5820 /* If INNER is a right shift of a constant and it plus BITNUM does
5821 not overflow, adjust BITNUM and INNER. */
5822 if (TREE_CODE (inner) == RSHIFT_EXPR
5823 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5824 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5825 && bitnum < TYPE_PRECISION (type)
5826 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5827 bitnum - TYPE_PRECISION (type)))
5828 {
5829 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5830 inner = TREE_OPERAND (inner, 0);
5831 }
5832
5833 /* If we are going to be able to omit the AND below, we must do our
5834 operations as unsigned. If we must use the AND, we have a choice.
5835 Normally unsigned is faster, but for some machines signed is. */
5836 #ifdef LOAD_EXTEND_OP
5837 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
5838 #else
5839 ops_unsigned = 1;
5840 #endif
5841
5842 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
5843 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
5844 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5845 inner = fold_convert (intermediate_type, inner);
5846
5847 if (bitnum != 0)
5848 inner = build2 (RSHIFT_EXPR, intermediate_type,
5849 inner, size_int (bitnum));
5850
5851 if (code == EQ_EXPR)
5852 inner = fold (build2 (BIT_XOR_EXPR, intermediate_type,
5853 inner, integer_one_node));
5854
5855 /* Put the AND last so it can combine with more things. */
5856 inner = build2 (BIT_AND_EXPR, intermediate_type,
5857 inner, integer_one_node);
5858
5859 /* Make sure to return the proper type. */
5860 inner = fold_convert (result_type, inner);
5861
5862 return inner;
5863 }
5864 return NULL_TREE;
5865 }
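/* A standalone sketch, not part of GCC, of the single-bit rewrite
   above: (a & C) != 0 with C == 1 << C2 becomes (a >> C2) & 1, and the
   == 0 form additionally XORs the result with 1.  Hypothetical demo,
   guarded out.  */
#if 0
#include <assert.h>
int
main (void)
{
  unsigned int a;
  for (a = 0; a < 32; a++)
    {
      assert (((a & 8) != 0) == ((a >> 3) & 1));
      assert (((a & 8) == 0) == (((a >> 3) & 1) ^ 1));
    }
  return 0;
}
#endif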
5866
5867 /* Check whether we are allowed to reorder operands arg0 and arg1,
5868 such that the evaluation of arg1 occurs before arg0. */
5869
5870 static bool
5871 reorder_operands_p (tree arg0, tree arg1)
5872 {
5873 if (! flag_evaluation_order)
5874 return true;
5875 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
5876 return true;
5877 return ! TREE_SIDE_EFFECTS (arg0)
5878 && ! TREE_SIDE_EFFECTS (arg1);
5879 }
5880
5881 /* Test whether it is preferable to swap two operands, ARG0 and
5882 ARG1, for example because ARG0 is an integer constant and ARG1
5883 isn't. If REORDER is true, only recommend swapping if we can
5884 evaluate the operands in reverse order. */
5885
5886 bool
5887 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
5888 {
5889 STRIP_SIGN_NOPS (arg0);
5890 STRIP_SIGN_NOPS (arg1);
5891
5892 if (TREE_CODE (arg1) == INTEGER_CST)
5893 return 0;
5894 if (TREE_CODE (arg0) == INTEGER_CST)
5895 return 1;
5896
5897 if (TREE_CODE (arg1) == REAL_CST)
5898 return 0;
5899 if (TREE_CODE (arg0) == REAL_CST)
5900 return 1;
5901
5902 if (TREE_CODE (arg1) == COMPLEX_CST)
5903 return 0;
5904 if (TREE_CODE (arg0) == COMPLEX_CST)
5905 return 1;
5906
5907 if (TREE_CONSTANT (arg1))
5908 return 0;
5909 if (TREE_CONSTANT (arg0))
5910 return 1;
5911
5912 if (optimize_size)
5913 return 0;
5914
5915 if (reorder && flag_evaluation_order
5916 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5917 return 0;
5918
5919 if (DECL_P (arg1))
5920 return 0;
5921 if (DECL_P (arg0))
5922 return 1;
5923
5933 /* It is preferable to swap two SSA_NAME to ensure a canonical form
5934 for commutative and comparison operators. Ensuring a canonical
5935 form allows the optimizers to find additional redundancies without
5936 having to explicitly check for both orderings. */
5937 if (TREE_CODE (arg0) == SSA_NAME
5938 && TREE_CODE (arg1) == SSA_NAME
5939 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
5940 return 1;
5941
5942 return 0;
5943 }
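/* For example, the commutative-operand swap in fold below uses this
   predicate to canonicalize `2 + x' as `x + 2', so later patterns only
   need to look for constants in the second operand.  (Illustrative
   note, not from the original sources.)  */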
5944
5945 /* Perform constant folding and related simplification of EXPR.
5946 The related simplifications include x*1 => x, x*0 => 0, etc.,
5947 and application of the associative law.
5948 NOP_EXPR conversions may be removed freely (as long as we
5949 are careful not to change the type of the overall expression).
5950 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
5951 but we can constant-fold them if they have constant operands. */
5952
5953 #ifdef ENABLE_FOLD_CHECKING
5954 # define fold(x) fold_1 (x)
5955 static tree fold_1 (tree);
5956 static
5957 #endif
5958 tree
5959 fold (tree expr)
5960 {
5961 const tree t = expr;
5962 const tree type = TREE_TYPE (expr);
5963 tree t1 = NULL_TREE;
5964 tree tem;
5965 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
5966 enum tree_code code = TREE_CODE (t);
5967 int kind = TREE_CODE_CLASS (code);
5968
5969 /* WINS will be nonzero when the switch is done
5970 if all operands are constant. */
5971 int wins = 1;
5972
5973 /* Return right away if a constant. */
5974 if (kind == 'c')
5975 return t;
5976
5977 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
5978 {
5979 tree subop;
5980
5981 /* Special case for conversion ops that can have fixed point args. */
5982 arg0 = TREE_OPERAND (t, 0);
5983
5984 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
5985 if (arg0 != 0)
5986 STRIP_SIGN_NOPS (arg0);
5987
5988 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
5989 subop = TREE_REALPART (arg0);
5990 else
5991 subop = arg0;
5992
5993 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
5994 && TREE_CODE (subop) != REAL_CST)
5995 /* Note that TREE_CONSTANT isn't enough:
5996 static var addresses are constant but we can't
5997 do arithmetic on them. */
5998 wins = 0;
5999 }
6000 else if (IS_EXPR_CODE_CLASS (kind))
6001 {
6002 int len = first_rtl_op (code);
6003 int i;
6004 for (i = 0; i < len; i++)
6005 {
6006 tree op = TREE_OPERAND (t, i);
6007 tree subop;
6008
6009 if (op == 0)
6010 continue; /* Valid for CALL_EXPR, at least. */
6011
6012 /* Strip any conversions that don't change the mode. This is
6013 safe for every expression, except for a comparison expression
6014 because its signedness is derived from its operands. So, in
6015 the latter case, only strip conversions that don't change the
6016 signedness.
6017
6018 Note that this is done as an internal manipulation within the
6019 constant folder, in order to find the simplest representation
6020 of the arguments so that their form can be studied. In any
6021 cases, the appropriate type conversions should be put back in
6022 the tree that will get out of the constant folder. */
6023 if (kind == '<')
6024 STRIP_SIGN_NOPS (op);
6025 else
6026 STRIP_NOPS (op);
6027
6028 if (TREE_CODE (op) == COMPLEX_CST)
6029 subop = TREE_REALPART (op);
6030 else
6031 subop = op;
6032
6033 if (TREE_CODE (subop) != INTEGER_CST
6034 && TREE_CODE (subop) != REAL_CST)
6035 /* Note that TREE_CONSTANT isn't enough:
6036 static var addresses are constant but we can't
6037 do arithmetic on them. */
6038 wins = 0;
6039
6040 if (i == 0)
6041 arg0 = op;
6042 else if (i == 1)
6043 arg1 = op;
6044 }
6045 }
6046
6047 /* If this is a commutative operation, and ARG0 is a constant, move it
6048 to ARG1 to reduce the number of tests below. */
6049 if (commutative_tree_code (code)
6050 && tree_swap_operands_p (arg0, arg1, true))
6051 return fold (build2 (code, type, TREE_OPERAND (t, 1),
6052 TREE_OPERAND (t, 0)));
6053
6054 /* Now WINS is set as described above,
6055 ARG0 is the first operand of EXPR,
6056 and ARG1 is the second operand (if it has more than one operand).
6057
6058 First check for cases where an arithmetic operation is applied to a
6059 compound, conditional, or comparison operation. Push the arithmetic
6060 operation inside the compound or conditional to see if any folding
6061 can then be done. Convert comparison to conditional for this purpose.
6062 The also optimizes non-constant cases that used to be done in
6063 expand_expr.
6064
6065 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
6066 where one of the operands is a comparison and the other is a comparison, a
6067 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
6068 code below would make the expression more complex. Change it to a
6069 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
6070 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
6071
6072 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
6073 || code == EQ_EXPR || code == NE_EXPR)
6074 && ((truth_value_p (TREE_CODE (arg0))
6075 && (truth_value_p (TREE_CODE (arg1))
6076 || (TREE_CODE (arg1) == BIT_AND_EXPR
6077 && integer_onep (TREE_OPERAND (arg1, 1)))))
6078 || (truth_value_p (TREE_CODE (arg1))
6079 && (truth_value_p (TREE_CODE (arg0))
6080 || (TREE_CODE (arg0) == BIT_AND_EXPR
6081 && integer_onep (TREE_OPERAND (arg0, 1)))))))
6082 {
6083 tem = fold (build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
6084 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
6085 : TRUTH_XOR_EXPR,
6086 type, fold_convert (boolean_type_node, arg0),
6087 fold_convert (boolean_type_node, arg1)));
6088
6089 if (code == EQ_EXPR)
6090 tem = invert_truthvalue (tem);
6091
6092 return tem;
6093 }
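/* The block above rewrites, e.g., `(a < b) & (c < d)' — a BIT_AND_EXPR
   of two comparisons — as a TRUTH_AND_EXPR, and `(a < b) == (c < d)'
   as the inversion of a TRUTH_XOR_EXPR.  (Illustrative note, not from
   the original sources.)  */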
6094
6095 if (TREE_CODE_CLASS (code) == '1')
6096 {
6097 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6098 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6099 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
6100 else if (TREE_CODE (arg0) == COND_EXPR)
6101 {
6102 tree arg01 = TREE_OPERAND (arg0, 1);
6103 tree arg02 = TREE_OPERAND (arg0, 2);
6104 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6105 arg01 = fold (build1 (code, type, arg01));
6106 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6107 arg02 = fold (build1 (code, type, arg02));
6108 tem = fold (build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6109 arg01, arg02));
6110
6111 /* If this was a conversion, and all we did was to move it
6112 inside the COND_EXPR, bring it back out. But leave it if
6113 it is a conversion from integer to integer and the
6114 result precision is no wider than a word since such a
6115 conversion is cheap and may be optimized away by combine,
6116 while it couldn't if it were outside the COND_EXPR. Then return
6117 so we don't get into an infinite recursion loop taking the
6118 conversion out and then back in. */
6119
6120 if ((code == NOP_EXPR || code == CONVERT_EXPR
6121 || code == NON_LVALUE_EXPR)
6122 && TREE_CODE (tem) == COND_EXPR
6123 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6124 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6125 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 1)))
6126 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 2)))
6127 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6128 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6129 && ! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6130 && (INTEGRAL_TYPE_P
6131 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6132 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD))
6133 tem = build1 (code, type,
6134 build3 (COND_EXPR,
6135 TREE_TYPE (TREE_OPERAND
6136 (TREE_OPERAND (tem, 1), 0)),
6137 TREE_OPERAND (tem, 0),
6138 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6139 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6140 return tem;
6141 }
6142 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
6143 {
6144 if (TREE_CODE (type) == BOOLEAN_TYPE)
6145 {
6146 arg0 = copy_node (arg0);
6147 TREE_TYPE (arg0) = type;
6148 return arg0;
6149 }
6150 else if (TREE_CODE (type) != INTEGER_TYPE)
6151 return fold (build3 (COND_EXPR, type, arg0,
6152 fold (build1 (code, type,
6153 integer_one_node)),
6154 fold (build1 (code, type,
6155 integer_zero_node))));
6156 }
6157 }
6158 else if (TREE_CODE_CLASS (code) == '<'
6159 && TREE_CODE (arg0) == COMPOUND_EXPR)
6160 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6161 fold (build2 (code, type, TREE_OPERAND (arg0, 1), arg1)));
6162 else if (TREE_CODE_CLASS (code) == '<'
6163 && TREE_CODE (arg1) == COMPOUND_EXPR)
6164 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
6165 fold (build2 (code, type, arg0, TREE_OPERAND (arg1, 1))));
6166 else if (TREE_CODE_CLASS (code) == '2'
6167 || TREE_CODE_CLASS (code) == '<')
6168 {
6169 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6170 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6171 fold (build2 (code, type, TREE_OPERAND (arg0, 1),
6172 arg1)));
6173 if (TREE_CODE (arg1) == COMPOUND_EXPR
6174 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
6175 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
6176 fold (build2 (code, type,
6177 arg0, TREE_OPERAND (arg1, 1))));
6178
6179 if (TREE_CODE (arg0) == COND_EXPR
6180 || TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
6181 {
6182 tem = fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
6183 /*cond_first_p=*/1);
6184 if (tem != NULL_TREE)
6185 return tem;
6186 }
6187
6188 if (TREE_CODE (arg1) == COND_EXPR
6189 || TREE_CODE_CLASS (TREE_CODE (arg1)) == '<')
6190 {
6191 tem = fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
6192 /*cond_first_p=*/0);
6193 if (tem != NULL_TREE)
6194 return tem;
6195 }
6196 }
6197
6198 switch (code)
6199 {
6200 case CONST_DECL:
6201 return fold (DECL_INITIAL (t));
6202
6203 case NOP_EXPR:
6204 case FLOAT_EXPR:
6205 case CONVERT_EXPR:
6206 case FIX_TRUNC_EXPR:
6207 case FIX_CEIL_EXPR:
6208 case FIX_FLOOR_EXPR:
6209 case FIX_ROUND_EXPR:
6210 if (TREE_TYPE (TREE_OPERAND (t, 0)) == type)
6211 return TREE_OPERAND (t, 0);
6212
6213 /* Handle cases of two conversions in a row. */
6214 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
6215 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
6216 {
6217 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6218 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
6219 int inside_int = INTEGRAL_TYPE_P (inside_type);
6220 int inside_ptr = POINTER_TYPE_P (inside_type);
6221 int inside_float = FLOAT_TYPE_P (inside_type);
6222 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6223 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6224 int inter_int = INTEGRAL_TYPE_P (inter_type);
6225 int inter_ptr = POINTER_TYPE_P (inter_type);
6226 int inter_float = FLOAT_TYPE_P (inter_type);
6227 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6228 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6229 int final_int = INTEGRAL_TYPE_P (type);
6230 int final_ptr = POINTER_TYPE_P (type);
6231 int final_float = FLOAT_TYPE_P (type);
6232 unsigned int final_prec = TYPE_PRECISION (type);
6233 int final_unsignedp = TYPE_UNSIGNED (type);
6234
6235 /* In addition to the cases of two conversions in a row
6236 handled below, if we are converting something to its own
6237 type via an object of identical or wider precision, neither
6238 conversion is needed. */
6239 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6240 && ((inter_int && final_int) || (inter_float && final_float))
6241 && inter_prec >= final_prec)
6242 return fold (build1 (code, type,
6243 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6244
6245 /* Likewise, if the intermediate and final types are either both
6246 float or both integer, we don't need the middle conversion if
6247 it is wider than the final type and doesn't change the signedness
6248 (for integers). Avoid this if the final type is a pointer
6249 since then we sometimes need the inner conversion. Likewise if
6250 the outer has a precision not equal to the size of its mode. */
6251 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6252 || (inter_float && inside_float))
6253 && inter_prec >= inside_prec
6254 && (inter_float || inter_unsignedp == inside_unsignedp)
6255 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6256 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6257 && ! final_ptr)
6258 return fold (build1 (code, type,
6259 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6260
6261 /* If we have a sign-extension of a zero-extended value, we can
6262 replace that by a single zero-extension. */
6263 if (inside_int && inter_int && final_int
6264 && inside_prec < inter_prec && inter_prec < final_prec
6265 && inside_unsignedp && !inter_unsignedp)
6266 return fold (build1 (code, type,
6267 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6268
6269 /* Two conversions in a row are not needed unless:
6270 - some conversion is floating-point (overstrict for now), or
6271 - the intermediate type is narrower than both initial and
6272 final, or
6273 - the intermediate type and innermost type differ in signedness,
6274 and the outermost type is wider than the intermediate, or
6275 - the initial type is a pointer type and the precisions of the
6276 intermediate and final types differ, or
6277 - the final type is a pointer type and the precisions of the
6278 initial and intermediate types differ. */
6279 if (! inside_float && ! inter_float && ! final_float
6280 && (inter_prec > inside_prec || inter_prec > final_prec)
6281 && ! (inside_int && inter_int
6282 && inter_unsignedp != inside_unsignedp
6283 && inter_prec < final_prec)
6284 && ((inter_unsignedp && inter_prec > inside_prec)
6285 == (final_unsignedp && final_prec > inter_prec))
6286 && ! (inside_ptr && inter_prec != final_prec)
6287 && ! (final_ptr && inside_prec != inter_prec)
6288 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6289 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6290 && ! final_ptr)
6291 return fold (build1 (code, type,
6292 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6293 }
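/* For example, with 32-bit int and 64-bit long, `(int) (long) i' for
   an `int' i collapses to plain `i' by the rules above, while
   `(int) (unsigned char) i' must keep the intermediate conversion
   because the truncation changes the value.  (Illustrative note, not
   from the original sources.)  */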
6294
6295 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
6296 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
6297 /* Detect assigning a bitfield. */
6298 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
6299 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
6300 {
6301 /* Don't leave an assignment inside a conversion
6302 unless assigning a bitfield. */
6303 tree prev = TREE_OPERAND (t, 0);
6304 tem = copy_node (t);
6305 TREE_OPERAND (tem, 0) = TREE_OPERAND (prev, 1);
6306 /* First do the assignment, then return converted constant. */
6307 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), prev, fold (tem));
6308 TREE_NO_WARNING (tem) = 1;
6309 TREE_USED (tem) = 1;
6310 return tem;
6311 }
6312
6313 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6314 constant (if x has signed type, the sign bit cannot be set
6315 in c). This folds extension into the BIT_AND_EXPR. */
6316 if (INTEGRAL_TYPE_P (type)
6317 && TREE_CODE (type) != BOOLEAN_TYPE
6318 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
6319 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
6320 {
6321 tree and = TREE_OPERAND (t, 0);
6322 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6323 int change = 0;
6324
6325 if (TYPE_UNSIGNED (TREE_TYPE (and))
6326 || (TYPE_PRECISION (type)
6327 <= TYPE_PRECISION (TREE_TYPE (and))))
6328 change = 1;
6329 else if (TYPE_PRECISION (TREE_TYPE (and1))
6330 <= HOST_BITS_PER_WIDE_INT
6331 && host_integerp (and1, 1))
6332 {
6333 unsigned HOST_WIDE_INT cst;
6334
6335 cst = tree_low_cst (and1, 1);
6336 cst &= (HOST_WIDE_INT) -1
6337 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6338 change = (cst == 0);
6339 #ifdef LOAD_EXTEND_OP
6340 if (change
6341 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6342 == ZERO_EXTEND))
6343 {
6344 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
6345 and0 = fold_convert (uns, and0);
6346 and1 = fold_convert (uns, and1);
6347 }
6348 #endif
6349 }
6350 if (change)
6351 return fold (build2 (BIT_AND_EXPR, type,
6352 fold_convert (type, and0),
6353 fold_convert (type, and1)));
6354 }
6355
6356 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
6357 T2 being pointers to types of the same size. */
6358 if (POINTER_TYPE_P (TREE_TYPE (t))
6359 && TREE_CODE_CLASS (TREE_CODE (arg0)) == '2'
6360 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
6361 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6362 {
6363 tree arg00 = TREE_OPERAND (arg0, 0);
6364 tree t0 = TREE_TYPE (t);
6365 tree t1 = TREE_TYPE (arg00);
6366 tree tt0 = TREE_TYPE (t0);
6367 tree tt1 = TREE_TYPE (t1);
6368 tree s0 = TYPE_SIZE (tt0);
6369 tree s1 = TYPE_SIZE (tt1);
6370
6371 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
6372 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
6373 TREE_OPERAND (arg0, 1));
6374 }
6375
6376 tem = fold_convert_const (code, type, arg0);
6377 return tem ? tem : t;
6378
6379 case VIEW_CONVERT_EXPR:
6380 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
6381 return build1 (VIEW_CONVERT_EXPR, type,
6382 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6383 return t;
6384
6385 case COMPONENT_REF:
6386 if (TREE_CODE (arg0) == CONSTRUCTOR
6387 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
6388 {
6389 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
6390 if (m)
6391 return TREE_VALUE (m);
6392 }
6393 return t;
6394
6395 case RANGE_EXPR:
6396 if (TREE_CONSTANT (t) != wins)
6397 {
6398 tem = copy_node (t);
6399 TREE_CONSTANT (tem) = wins;
6400 TREE_INVARIANT (tem) = wins;
6401 return tem;
6402 }
6403 return t;
6404
6405 case NEGATE_EXPR:
6406 if (negate_expr_p (arg0))
6407 return fold_convert (type, negate_expr (arg0));
6408 return t;
6409
6410 case ABS_EXPR:
6411 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
6412 return fold_abs_const (arg0, type);
6413 else if (TREE_CODE (arg0) == NEGATE_EXPR)
6414 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
6415 /* Convert fabs((double)float) into (double)fabsf(float). */
6416 else if (TREE_CODE (arg0) == NOP_EXPR
6417 && TREE_CODE (type) == REAL_TYPE)
6418 {
6419 tree targ0 = strip_float_extensions (arg0);
6420 if (targ0 != arg0)
6421 return fold_convert (type, fold (build1 (ABS_EXPR,
6422 TREE_TYPE (targ0),
6423 targ0)));
6424 }
6425 else if (tree_expr_nonnegative_p (arg0))
6426 return arg0;
6427 return t;
6428
6429 case CONJ_EXPR:
6430 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6431 return fold_convert (type, arg0);
6432 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6433 return build2 (COMPLEX_EXPR, type,
6434 TREE_OPERAND (arg0, 0),
6435 negate_expr (TREE_OPERAND (arg0, 1)));
6436 else if (TREE_CODE (arg0) == COMPLEX_CST)
6437 return build_complex (type, TREE_REALPART (arg0),
6438 negate_expr (TREE_IMAGPART (arg0)));
6439 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6440 return fold (build2 (TREE_CODE (arg0), type,
6441 fold (build1 (CONJ_EXPR, type,
6442 TREE_OPERAND (arg0, 0))),
6443 fold (build1 (CONJ_EXPR, type,
6444 TREE_OPERAND (arg0, 1)))));
6445 else if (TREE_CODE (arg0) == CONJ_EXPR)
6446 return TREE_OPERAND (arg0, 0);
6447 return t;
6448
6449 case BIT_NOT_EXPR:
6450 if (TREE_CODE (arg0) == INTEGER_CST)
6451 return fold_not_const (arg0, type);
6452 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
6453 return TREE_OPERAND (arg0, 0);
6454 return t;
6455
6456 case PLUS_EXPR:
6457 /* A + (-B) -> A - B */
6458 if (TREE_CODE (arg1) == NEGATE_EXPR)
6459 return fold (build2 (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6460 /* (-A) + B -> B - A */
6461 if (TREE_CODE (arg0) == NEGATE_EXPR
6462 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
6463 return fold (build2 (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
6464 if (! FLOAT_TYPE_P (type))
6465 {
6466 if (integer_zerop (arg1))
6467 return non_lvalue (fold_convert (type, arg0));
6468
6469 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
6470 with a constant, and the two constants have no bits in common,
6471 we should treat this as a BIT_IOR_EXPR since this may produce more
6472 simplifications. */
6473 if (TREE_CODE (arg0) == BIT_AND_EXPR
6474 && TREE_CODE (arg1) == BIT_AND_EXPR
6475 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6476 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6477 && integer_zerop (const_binop (BIT_AND_EXPR,
6478 TREE_OPERAND (arg0, 1),
6479 TREE_OPERAND (arg1, 1), 0)))
6480 {
6481 code = BIT_IOR_EXPR;
6482 goto bit_ior;
6483 }
6484
6485 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
6486 (plus (plus (mult) (mult)) (foo)) so that we can
6487 take advantage of the factoring cases below. */
6488 if ((TREE_CODE (arg0) == PLUS_EXPR
6489 && TREE_CODE (arg1) == MULT_EXPR)
6490 || (TREE_CODE (arg1) == PLUS_EXPR
6491 && TREE_CODE (arg0) == MULT_EXPR))
6492 {
6493 tree parg0, parg1, parg, marg;
6494
6495 if (TREE_CODE (arg0) == PLUS_EXPR)
6496 parg = arg0, marg = arg1;
6497 else
6498 parg = arg1, marg = arg0;
6499 parg0 = TREE_OPERAND (parg, 0);
6500 parg1 = TREE_OPERAND (parg, 1);
6501 STRIP_NOPS (parg0);
6502 STRIP_NOPS (parg1);
6503
6504 if (TREE_CODE (parg0) == MULT_EXPR
6505 && TREE_CODE (parg1) != MULT_EXPR)
6506 return fold (build2 (PLUS_EXPR, type,
6507 fold (build2 (PLUS_EXPR, type,
6508 fold_convert (type, parg0),
6509 fold_convert (type, marg))),
6510 fold_convert (type, parg1)));
6511 if (TREE_CODE (parg0) != MULT_EXPR
6512 && TREE_CODE (parg1) == MULT_EXPR)
6513 return fold (build2 (PLUS_EXPR, type,
6514 fold (build2 (PLUS_EXPR, type,
6515 fold_convert (type, parg1),
6516 fold_convert (type, marg))),
6517 fold_convert (type, parg0)));
6518 }
6519
6520 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
6521 {
6522 tree arg00, arg01, arg10, arg11;
6523 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6524
6525 /* (A * C) + (B * C) -> (A+B) * C.
6526 We are most concerned about the case where C is a constant,
6527 but other combinations show up during loop reduction. Since
6528 it is not difficult, try all four possibilities. */
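/* For example, `a*c + b*c' becomes `(a + b) * c' directly, and the
   power-of-two fallback below rewrites `a*12 + b*4' as
   `(a*3 + b) * 4'.  (Illustrative note, not from the original
   sources.)  */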
6529
6530 arg00 = TREE_OPERAND (arg0, 0);
6531 arg01 = TREE_OPERAND (arg0, 1);
6532 arg10 = TREE_OPERAND (arg1, 0);
6533 arg11 = TREE_OPERAND (arg1, 1);
6534 same = NULL_TREE;
6535
6536 if (operand_equal_p (arg01, arg11, 0))
6537 same = arg01, alt0 = arg00, alt1 = arg10;
6538 else if (operand_equal_p (arg00, arg10, 0))
6539 same = arg00, alt0 = arg01, alt1 = arg11;
6540 else if (operand_equal_p (arg00, arg11, 0))
6541 same = arg00, alt0 = arg01, alt1 = arg10;
6542 else if (operand_equal_p (arg01, arg10, 0))
6543 same = arg01, alt0 = arg00, alt1 = arg11;
6544
6545 /* No identical multiplicands; see if we can find a common
6546 power-of-two factor in non-power-of-two multiplies. This
6547 can help in multi-dimensional array access. */
6548 else if (TREE_CODE (arg01) == INTEGER_CST
6549 && TREE_CODE (arg11) == INTEGER_CST
6550 && TREE_INT_CST_HIGH (arg01) == 0
6551 && TREE_INT_CST_HIGH (arg11) == 0)
6552 {
6553 HOST_WIDE_INT int01, int11, tmp;
6554 int01 = TREE_INT_CST_LOW (arg01);
6555 int11 = TREE_INT_CST_LOW (arg11);
6556
6557 /* Move min of absolute values to int11. */
6558 if ((int01 >= 0 ? int01 : -int01)
6559 < (int11 >= 0 ? int11 : -int11))
6560 {
6561 tmp = int01, int01 = int11, int11 = tmp;
6562 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6563 alt0 = arg01, arg01 = arg11, arg11 = alt0;
6564 }
6565
6566 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6567 {
6568 alt0 = fold (build2 (MULT_EXPR, type, arg00,
6569 build_int_2 (int01 / int11, 0)));
6570 alt1 = arg10;
6571 same = arg11;
6572 }
6573 }
6574
6575 if (same)
6576 return fold (build2 (MULT_EXPR, type,
6577 fold (build2 (PLUS_EXPR, type,
6578 alt0, alt1)),
6579 same));
6580 }
6581 }
6582 else
6583 {
6584 /* See if ARG1 is zero and X + ARG1 reduces to X. */
6585 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
6586 return non_lvalue (fold_convert (type, arg0));
6587
6588 /* Likewise if the operands are reversed. */
6589 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6590 return non_lvalue (fold_convert (type, arg1));
6591
6592 /* Convert X + -C into X - C. */
6593 if (TREE_CODE (arg1) == REAL_CST
6594 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
6595 {
6596 tem = fold_negate_const (arg1, type);
6597 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
6598 return fold (build2 (MINUS_EXPR, type,
6599 fold_convert (type, arg0),
6600 fold_convert (type, tem)));
6601 }
6602
6603 /* Convert x+x into x*2.0. */
6604 if (operand_equal_p (arg0, arg1, 0)
6605 && SCALAR_FLOAT_TYPE_P (type))
6606 return fold (build2 (MULT_EXPR, type, arg0,
6607 build_real (type, dconst2)));
6608
6609 /* Convert x*c+x into x*(c+1). */
6610 if (flag_unsafe_math_optimizations
6611 && TREE_CODE (arg0) == MULT_EXPR
6612 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6613 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6614 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
6615 {
6616 REAL_VALUE_TYPE c;
6617
6618 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6619 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6620 return fold (build2 (MULT_EXPR, type, arg1,
6621 build_real (type, c)));
6622 }
6623
6624 /* Convert x+x*c into x*(c+1). */
6625 if (flag_unsafe_math_optimizations
6626 && TREE_CODE (arg1) == MULT_EXPR
6627 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6628 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6629 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
6630 {
6631 REAL_VALUE_TYPE c;
6632
6633 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6634 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6635 return fold (build2 (MULT_EXPR, type, arg0,
6636 build_real (type, c)));
6637 }
6638
6639 /* Convert x*c1+x*c2 into x*(c1+c2). */
6640 if (flag_unsafe_math_optimizations
6641 && TREE_CODE (arg0) == MULT_EXPR
6642 && TREE_CODE (arg1) == MULT_EXPR
6643 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6644 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6645 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6646 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6647 && operand_equal_p (TREE_OPERAND (arg0, 0),
6648 TREE_OPERAND (arg1, 0), 0))
6649 {
6650 REAL_VALUE_TYPE c1, c2;
6651
6652 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6653 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6654 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
6655 return fold (build2 (MULT_EXPR, type,
6656 TREE_OPERAND (arg0, 0),
6657 build_real (type, c1)));
6658 }
6659 /* Convert a + (b*c + d*e) into (a + b*c) + d*e */
6660 if (flag_unsafe_math_optimizations
6661 && TREE_CODE (arg1) == PLUS_EXPR
6662 && TREE_CODE (arg0) != MULT_EXPR)
6663 {
6664 tree tree10 = TREE_OPERAND (arg1, 0);
6665 tree tree11 = TREE_OPERAND (arg1, 1);
6666 if (TREE_CODE (tree11) == MULT_EXPR
6667 && TREE_CODE (tree10) == MULT_EXPR)
6668 {
6669 tree tree0;
6670 tree0 = fold (build2 (PLUS_EXPR, type, arg0, tree10));
6671 return fold (build2 (PLUS_EXPR, type, tree0, tree11));
6672 }
6673 }
6674 /* Convert (b*c + d*e) + a into b*c + (d*e + a). */
6675 if (flag_unsafe_math_optimizations
6676 && TREE_CODE (arg0) == PLUS_EXPR
6677 && TREE_CODE (arg1) != MULT_EXPR)
6678 {
6679 tree tree00 = TREE_OPERAND (arg0, 0);
6680 tree tree01 = TREE_OPERAND (arg0, 1);
6681 if (TREE_CODE (tree01) == MULT_EXPR
6682 && TREE_CODE (tree00) == MULT_EXPR)
6683 {
6684 tree tree0;
6685 tree0 = fold (build2 (PLUS_EXPR, type, tree01, arg1));
6686 return fold (build2 (PLUS_EXPR, type, tree00, tree0));
6687 }
6688 }
6689 }
6690
6691 bit_rotate:
6692 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
6693 is a rotate of A by C1 bits. */
6694 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
6695 is a rotate of A by B bits. */
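/* For example, with a 32-bit unsigned A, both `(A << 3) + (A >> 29)'
   and `(A << B) + (A >> (32 - B))' are recognized below as a left
   rotate of A.  (Illustrative note, not from the original sources.)  */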
6696 {
6697 enum tree_code code0, code1;
6698 code0 = TREE_CODE (arg0);
6699 code1 = TREE_CODE (arg1);
6700 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
6701 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
6702 && operand_equal_p (TREE_OPERAND (arg0, 0),
6703 TREE_OPERAND (arg1, 0), 0)
6704 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6705 {
6706 tree tree01, tree11;
6707 enum tree_code code01, code11;
6708
6709 tree01 = TREE_OPERAND (arg0, 1);
6710 tree11 = TREE_OPERAND (arg1, 1);
6711 STRIP_NOPS (tree01);
6712 STRIP_NOPS (tree11);
6713 code01 = TREE_CODE (tree01);
6714 code11 = TREE_CODE (tree11);
6715 if (code01 == INTEGER_CST
6716 && code11 == INTEGER_CST
6717 && TREE_INT_CST_HIGH (tree01) == 0
6718 && TREE_INT_CST_HIGH (tree11) == 0
6719 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
6720 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
6721 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
6722 code0 == LSHIFT_EXPR ? tree01 : tree11);
6723 else if (code11 == MINUS_EXPR)
6724 {
6725 tree tree110, tree111;
6726 tree110 = TREE_OPERAND (tree11, 0);
6727 tree111 = TREE_OPERAND (tree11, 1);
6728 STRIP_NOPS (tree110);
6729 STRIP_NOPS (tree111);
6730 if (TREE_CODE (tree110) == INTEGER_CST
6731 && 0 == compare_tree_int (tree110,
6732 TYPE_PRECISION
6733 (TREE_TYPE (TREE_OPERAND
6734 (arg0, 0))))
6735 && operand_equal_p (tree01, tree111, 0))
6736 return build2 ((code0 == LSHIFT_EXPR
6737 ? LROTATE_EXPR
6738 : RROTATE_EXPR),
6739 type, TREE_OPERAND (arg0, 0), tree01);
6740 }
6741 else if (code01 == MINUS_EXPR)
6742 {
6743 tree tree010, tree011;
6744 tree010 = TREE_OPERAND (tree01, 0);
6745 tree011 = TREE_OPERAND (tree01, 1);
6746 STRIP_NOPS (tree010);
6747 STRIP_NOPS (tree011);
6748 if (TREE_CODE (tree010) == INTEGER_CST
6749 && 0 == compare_tree_int (tree010,
6750 TYPE_PRECISION
6751 (TREE_TYPE (TREE_OPERAND
6752 (arg0, 0))))
6753 && operand_equal_p (tree11, tree011, 0))
6754 return build2 ((code0 != LSHIFT_EXPR
6755 ? LROTATE_EXPR
6756 : RROTATE_EXPR),
6757 type, TREE_OPERAND (arg0, 0), tree11);
6758 }
6759 }
6760 }
6761
6762 associate:
6763 /* In most languages, we can't associate operations on floats through
6764 parentheses. Rather than remember where the parentheses were, we
6765 don't associate floats at all, unless the user has specified
6766 -funsafe-math-optimizations. */
6767
6768 if (! wins
6769 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
6770 {
6771 tree var0, con0, lit0, minus_lit0;
6772 tree var1, con1, lit1, minus_lit1;
6773
6774 /* Split both trees into variables, constants, and literals. Then
6775 associate each group together, the constants with literals,
6776 then the result with variables. This increases the chances of
6777 literals being recombined later and of generating relocatable
6778 expressions for the sum of a constant and literal. */
6779 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
6780 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
6781 code == MINUS_EXPR);
6782
6783 /* Only do something if we found more than two objects. Otherwise,
6784 nothing has changed and we risk infinite recursion. */
6785 if (2 < ((var0 != 0) + (var1 != 0)
6786 + (con0 != 0) + (con1 != 0)
6787 + (lit0 != 0) + (lit1 != 0)
6788 + (minus_lit0 != 0) + (minus_lit1 != 0)))
6789 {
6790 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
6791 if (code == MINUS_EXPR)
6792 code = PLUS_EXPR;
6793
6794 var0 = associate_trees (var0, var1, code, type);
6795 con0 = associate_trees (con0, con1, code, type);
6796 lit0 = associate_trees (lit0, lit1, code, type);
6797 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
6798
6799 /* Preserve the MINUS_EXPR if the negative part of the literal is
6800 greater than the positive part. Otherwise, the multiplicative
6801 folding code (i.e. extract_muldiv) may be fooled when
6802 unsigned constants are subtracted, as in the following
6803 example: ((X*2 + 4) - 8U)/2. */
6804 if (minus_lit0 && lit0)
6805 {
6806 if (TREE_CODE (lit0) == INTEGER_CST
6807 && TREE_CODE (minus_lit0) == INTEGER_CST
6808 && tree_int_cst_lt (lit0, minus_lit0))
6809 {
6810 minus_lit0 = associate_trees (minus_lit0, lit0,
6811 MINUS_EXPR, type);
6812 lit0 = 0;
6813 }
6814 else
6815 {
6816 lit0 = associate_trees (lit0, minus_lit0,
6817 MINUS_EXPR, type);
6818 minus_lit0 = 0;
6819 }
6820 }
6821 if (minus_lit0)
6822 {
6823 if (con0 == 0)
6824 return fold_convert (type,
6825 associate_trees (var0, minus_lit0,
6826 MINUS_EXPR, type));
6827 else
6828 {
6829 con0 = associate_trees (con0, minus_lit0,
6830 MINUS_EXPR, type);
6831 return fold_convert (type,
6832 associate_trees (var0, con0,
6833 PLUS_EXPR, type));
6834 }
6835 }
6836
6837 con0 = associate_trees (con0, lit0, code, type);
6838 return fold_convert (type, associate_trees (var0, con0,
6839 code, type));
6840 }
6841 }
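/* For example, the code above splits `(x - 4) + (y + 9)' into the
   variables x and y and the literals -4 and 9, and recombines it as
   `(x + y) + 5'.  (Illustrative note, not from the original
   sources.)  */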
6842
6843 binary:
6844 if (wins)
6845 t1 = const_binop (code, arg0, arg1, 0);
6846 if (t1 != NULL_TREE)
6847 {
6848 /* The return value should always have
6849 the same type as the original expression. */
6850 if (TREE_TYPE (t1) != type)
6851 t1 = fold_convert (type, t1);
6852
6853 return t1;
6854 }
6855 return t;
6856
6857 case MINUS_EXPR:
6858 /* A - (-B) -> A + B */
6859 if (TREE_CODE (arg1) == NEGATE_EXPR)
6860 return fold (build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6861 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
6862 if (TREE_CODE (arg0) == NEGATE_EXPR
6863 && (FLOAT_TYPE_P (type)
6864 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
6865 && negate_expr_p (arg1)
6866 && reorder_operands_p (arg0, arg1))
6867 return fold (build2 (MINUS_EXPR, type, negate_expr (arg1),
6868 TREE_OPERAND (arg0, 0)));
6869
6870 if (! FLOAT_TYPE_P (type))
6871 {
6872 if (! wins && integer_zerop (arg0))
6873 return negate_expr (fold_convert (type, arg1));
6874 if (integer_zerop (arg1))
6875 return non_lvalue (fold_convert (type, arg0));
6876
6877 /* Fold A - (A & B) into ~B & A. */
6878 if (!TREE_SIDE_EFFECTS (arg0)
6879 && TREE_CODE (arg1) == BIT_AND_EXPR)
6880 {
6881 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
6882 return fold (build2 (BIT_AND_EXPR, type,
6883 fold (build1 (BIT_NOT_EXPR, type,
6884 TREE_OPERAND (arg1, 0))),
6885 arg0));
6886 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
6887 return fold (build2 (BIT_AND_EXPR, type,
6888 fold (build1 (BIT_NOT_EXPR, type,
6889 TREE_OPERAND (arg1, 1))),
6890 arg0));
6891 }
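	  /* Illustrative sketch with hypothetical values: for a = 0b1100
	     and b = 0b1010, a - (a & b) = 0b1100 - 0b1000 = 0b0100, and
	     (~b) & a = 0b0101 & 0b1100 = 0b0100, so both forms agree.  */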
6892
6893 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
6894 any power of 2 minus 1. */
6895 if (TREE_CODE (arg0) == BIT_AND_EXPR
6896 && TREE_CODE (arg1) == BIT_AND_EXPR
6897 && operand_equal_p (TREE_OPERAND (arg0, 0),
6898 TREE_OPERAND (arg1, 0), 0))
6899 {
6900 tree mask0 = TREE_OPERAND (arg0, 1);
6901 tree mask1 = TREE_OPERAND (arg1, 1);
6902 tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
6903
6904 if (operand_equal_p (tem, mask1, 0))
6905 {
6906 tem = fold (build2 (BIT_XOR_EXPR, type,
6907 TREE_OPERAND (arg0, 0), mask1));
6908 return fold (build2 (MINUS_EXPR, type, tem, mask1));
6909 }
6910 }
6911 }
6912
6913 /* See if ARG1 is zero and X - ARG1 reduces to X. */
6914 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
6915 return non_lvalue (fold_convert (type, arg0));
6916
6917 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
6918 ARG0 is zero and X + ARG0 reduces to X, since that would mean
6919 (-ARG1 + ARG0) reduces to -ARG1. */
6920 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6921 return negate_expr (fold_convert (type, arg1));
6922
6923 /* Fold &x - &x. This can happen from &x.foo - &x.
6924 This is unsafe for certain floats even in non-IEEE formats.
6925 In IEEE, it is unsafe because it does wrong for NaNs.
6926 Also note that operand_equal_p is always false if an operand
6927 is volatile. */
6928
6929 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
6930 && operand_equal_p (arg0, arg1, 0))
6931 return fold_convert (type, integer_zero_node);
6932
6933 /* A - B -> A + (-B) if B is easily negatable. */
6934 if (!wins && negate_expr_p (arg1)
6935 && ((FLOAT_TYPE_P (type)
6936 /* Avoid this transformation if B is a positive REAL_CST. */
6937 && (TREE_CODE (arg1) != REAL_CST
6938 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
6939 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
6940 return fold (build2 (PLUS_EXPR, type, arg0, negate_expr (arg1)));
6941
6942 if (TREE_CODE (arg0) == MULT_EXPR
6943 && TREE_CODE (arg1) == MULT_EXPR
6944 && (INTEGRAL_TYPE_P (type) || flag_unsafe_math_optimizations))
6945 {
6946 /* (A * C) - (B * C) -> (A-B) * C. */
6947 if (operand_equal_p (TREE_OPERAND (arg0, 1),
6948 TREE_OPERAND (arg1, 1), 0))
6949 return fold (build2 (MULT_EXPR, type,
6950 fold (build2 (MINUS_EXPR, type,
6951 TREE_OPERAND (arg0, 0),
6952 TREE_OPERAND (arg1, 0))),
6953 TREE_OPERAND (arg0, 1)));
6954 /* (A * C1) - (A * C2) -> A * (C1-C2). */
6955 if (operand_equal_p (TREE_OPERAND (arg0, 0),
6956 TREE_OPERAND (arg1, 0), 0))
6957 return fold (build2 (MULT_EXPR, type,
6958 TREE_OPERAND (arg0, 0),
6959 fold (build2 (MINUS_EXPR, type,
6960 TREE_OPERAND (arg0, 1),
6961 TREE_OPERAND (arg1, 1)))));
6962 }
6963
6964 goto associate;
6965
6966 case MULT_EXPR:
6967 /* (-A) * (-B) -> A * B */
6968 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6969 return fold (build2 (MULT_EXPR, type,
6970 TREE_OPERAND (arg0, 0),
6971 negate_expr (arg1)));
6972 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6973 return fold (build2 (MULT_EXPR, type,
6974 negate_expr (arg0),
6975 TREE_OPERAND (arg1, 0)));
6976
6977 if (! FLOAT_TYPE_P (type))
6978 {
6979 if (integer_zerop (arg1))
6980 return omit_one_operand (type, arg1, arg0);
6981 if (integer_onep (arg1))
6982 return non_lvalue (fold_convert (type, arg0));
6983
6984 /* (a * (1 << b)) is (a << b) */
6985 if (TREE_CODE (arg1) == LSHIFT_EXPR
6986 && integer_onep (TREE_OPERAND (arg1, 0)))
6987 return fold (build2 (LSHIFT_EXPR, type, arg0,
6988 TREE_OPERAND (arg1, 1)));
6989 if (TREE_CODE (arg0) == LSHIFT_EXPR
6990 && integer_onep (TREE_OPERAND (arg0, 0)))
6991 return fold (build2 (LSHIFT_EXPR, type, arg1,
6992 TREE_OPERAND (arg0, 1)));
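	  /* Illustrative sketch: a multiplication written as a * (1 << b)
	     becomes the shift a << b; e.g. a * (1 << 3) is folded to
	     a << 3 instead of a multiply by 8.  */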
6993
6994 if (TREE_CODE (arg1) == INTEGER_CST
6995 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
6996 fold_convert (type, arg1),
6997 code, NULL_TREE)))
6998 return fold_convert (type, tem);
6999
7000 }
7001 else
7002 {
7003 /* Maybe fold x * 0 to 0. The expressions aren't the same
7004 when x is NaN, since x * 0 is also NaN. Nor are they the
7005 same in modes with signed zeros, since multiplying a
7006 negative value by 0 gives -0, not +0. */
7007 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
7008 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
7009 && real_zerop (arg1))
7010 return omit_one_operand (type, arg1, arg0);
7011 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7012 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7013 && real_onep (arg1))
7014 return non_lvalue (fold_convert (type, arg0));
7015
7016 /* Transform x * -1.0 into -x. */
7017 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7018 && real_minus_onep (arg1))
7019 return fold_convert (type, negate_expr (arg0));
7020
7021 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7022 if (flag_unsafe_math_optimizations
7023 && TREE_CODE (arg0) == RDIV_EXPR
7024 && TREE_CODE (arg1) == REAL_CST
7025 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7026 {
7027 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7028 arg1, 0);
7029 if (tem)
7030 return fold (build2 (RDIV_EXPR, type, tem,
7031 TREE_OPERAND (arg0, 1)));
7032 }
7033
7034 if (flag_unsafe_math_optimizations)
7035 {
7036 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7037 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7038
7039 /* Optimizations of root(...)*root(...). */
7040 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7041 {
7042 tree rootfn, arg, arglist;
7043 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7044 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7045
7046 /* Optimize sqrt(x)*sqrt(x) as x. */
7047 if (BUILTIN_SQRT_P (fcode0)
7048 && operand_equal_p (arg00, arg10, 0)
7049 && ! HONOR_SNANS (TYPE_MODE (type)))
7050 return arg00;
7051
7052 /* Optimize root(x)*root(y) as root(x*y). */
7053 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7054 arg = fold (build2 (MULT_EXPR, type, arg00, arg10));
7055 arglist = build_tree_list (NULL_TREE, arg);
7056 return build_function_call_expr (rootfn, arglist);
7057 }
7058
7059 /* Optimize expN(x)*expN(y) as expN(x+y). */
7060 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7061 {
7062 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7063 tree arg = build2 (PLUS_EXPR, type,
7064 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7065 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7066 tree arglist = build_tree_list (NULL_TREE, fold (arg));
7067 return build_function_call_expr (expfn, arglist);
7068 }
7069
7070 /* Optimizations of pow(...)*pow(...). */
7071 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7072 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7073 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7074 {
7075 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7076 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7077 1)));
7078 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7079 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7080 1)));
7081
7082 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7083 if (operand_equal_p (arg01, arg11, 0))
7084 {
7085 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7086 tree arg = build2 (MULT_EXPR, type, arg00, arg10);
7087 tree arglist = tree_cons (NULL_TREE, fold (arg),
7088 build_tree_list (NULL_TREE,
7089 arg01));
7090 return build_function_call_expr (powfn, arglist);
7091 }
7092
7093 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7094 if (operand_equal_p (arg00, arg10, 0))
7095 {
7096 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7097 tree arg = fold (build2 (PLUS_EXPR, type, arg01, arg11));
7098 tree arglist = tree_cons (NULL_TREE, arg00,
7099 build_tree_list (NULL_TREE,
7100 arg));
7101 return build_function_call_expr (powfn, arglist);
7102 }
7103 }
7104
7105 /* Optimize tan(x)*cos(x) as sin(x). */
7106 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7107 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7108 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7109 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
7110 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
7111 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
7112 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7113 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7114 {
7115 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
7116
7117 if (sinfn != NULL_TREE)
7118 return build_function_call_expr (sinfn,
7119 TREE_OPERAND (arg0, 1));
7120 }
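	      /* Illustrative sketch (unsafe-math only, since it ignores
	         the poles of tan): both tan (x) * cos (x) and
	         cos (x) * tan (x) collapse to the single call sin (x)
	         when the two calls share the argument x.  */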
7121
7122 /* Optimize x*pow(x,c) as pow(x,c+1). */
7123 if (fcode1 == BUILT_IN_POW
7124 || fcode1 == BUILT_IN_POWF
7125 || fcode1 == BUILT_IN_POWL)
7126 {
7127 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7128 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7129 1)));
7130 if (TREE_CODE (arg11) == REAL_CST
7131 && ! TREE_CONSTANT_OVERFLOW (arg11)
7132 && operand_equal_p (arg0, arg10, 0))
7133 {
7134 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7135 REAL_VALUE_TYPE c;
7136 tree arg, arglist;
7137
7138 c = TREE_REAL_CST (arg11);
7139 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7140 arg = build_real (type, c);
7141 arglist = build_tree_list (NULL_TREE, arg);
7142 arglist = tree_cons (NULL_TREE, arg0, arglist);
7143 return build_function_call_expr (powfn, arglist);
7144 }
7145 }
7146
7147 /* Optimize pow(x,c)*x as pow(x,c+1). */
7148 if (fcode0 == BUILT_IN_POW
7149 || fcode0 == BUILT_IN_POWF
7150 || fcode0 == BUILT_IN_POWL)
7151 {
7152 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7153 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7154 1)));
7155 if (TREE_CODE (arg01) == REAL_CST
7156 && ! TREE_CONSTANT_OVERFLOW (arg01)
7157 && operand_equal_p (arg1, arg00, 0))
7158 {
7159 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7160 REAL_VALUE_TYPE c;
7161 tree arg, arglist;
7162
7163 c = TREE_REAL_CST (arg01);
7164 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7165 arg = build_real (type, c);
7166 arglist = build_tree_list (NULL_TREE, arg);
7167 arglist = tree_cons (NULL_TREE, arg1, arglist);
7168 return build_function_call_expr (powfn, arglist);
7169 }
7170 }
7171
7172 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
7173 if (! optimize_size
7174 && operand_equal_p (arg0, arg1, 0))
7175 {
7176 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7177
7178 if (powfn)
7179 {
7180 tree arg = build_real (type, dconst2);
7181 tree arglist = build_tree_list (NULL_TREE, arg);
7182 arglist = tree_cons (NULL_TREE, arg0, arglist);
7183 return build_function_call_expr (powfn, arglist);
7184 }
7185 }
7186 }
7187 }
7188 goto associate;
7189
7190 case BIT_IOR_EXPR:
7191 bit_ior:
7192 if (integer_all_onesp (arg1))
7193 return omit_one_operand (type, arg1, arg0);
7194 if (integer_zerop (arg1))
7195 return non_lvalue (fold_convert (type, arg0));
7196 if (operand_equal_p (arg0, arg1, 0))
7197 return non_lvalue (fold_convert (type, arg0));
7198
7199 /* ~X | X is -1. */
7200 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7201 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7202 {
7203 t1 = build_int_2 (-1, -1);
7204 TREE_TYPE (t1) = type;
7205 force_fit_type (t1, 0);
7206 return omit_one_operand (type, t1, arg1);
7207 }
7208
7209 /* X | ~X is -1. */
7210 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7211 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7212 {
7213 t1 = build_int_2 (-1, -1);
7214 TREE_TYPE (t1) = type;
7215 force_fit_type (t1, 0);
7216 return omit_one_operand (type, t1, arg0);
7217 }
7218
7219 t1 = distribute_bit_expr (code, type, arg0, arg1);
7220 if (t1 != NULL_TREE)
7221 return t1;
7222
7223 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
7224
7225 This results in more efficient code for machines without a NAND
7226 instruction. Combine will canonicalize to the first form
7227 which will allow use of NAND instructions provided by the
7228 backend if they exist. */
7229 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7230 && TREE_CODE (arg1) == BIT_NOT_EXPR)
7231 {
7232 return fold (build1 (BIT_NOT_EXPR, type,
7233 build2 (BIT_AND_EXPR, type,
7234 TREE_OPERAND (arg0, 0),
7235 TREE_OPERAND (arg1, 0))));
7236 }
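	  /* Illustrative sketch: this is De Morgan's law, rewriting
	     ~a | ~b as ~(a & b), which needs one fewer inversion on
	     machines without a NAND instruction; combine re-canonicalizes
	     the expression where NAND exists.  */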
7237
7238 /* See if this can be simplified into a rotate first. If that
7239 is unsuccessful continue in the association code. */
7240 goto bit_rotate;
7241
7242 case BIT_XOR_EXPR:
7243 if (integer_zerop (arg1))
7244 return non_lvalue (fold_convert (type, arg0));
7245 if (integer_all_onesp (arg1))
7246 return fold (build1 (BIT_NOT_EXPR, type, arg0));
7247 if (operand_equal_p (arg0, arg1, 0))
7248 return omit_one_operand (type, integer_zero_node, arg0);
7249
7250 /* ~X ^ X is -1. */
7251 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7252 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7253 {
7254 t1 = build_int_2 (-1, -1);
7255 TREE_TYPE (t1) = type;
7256 force_fit_type (t1, 0);
7257 return omit_one_operand (type, t1, arg1);
7258 }
7259
7260 /* X ^ ~X is -1. */
7261 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7262 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7263 {
7264 t1 = build_int_2 (-1, -1);
7265 TREE_TYPE (t1) = type;
7266 force_fit_type (t1, 0);
7267 return omit_one_operand (type, t1, arg0);
7268 }
7269
7270 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
7271 with a constant, and the two constants have no bits in common,
7272 we should treat this as a BIT_IOR_EXPR since this may produce more
7273 simplifications. */
7274 if (TREE_CODE (arg0) == BIT_AND_EXPR
7275 && TREE_CODE (arg1) == BIT_AND_EXPR
7276 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7277 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7278 && integer_zerop (const_binop (BIT_AND_EXPR,
7279 TREE_OPERAND (arg0, 1),
7280 TREE_OPERAND (arg1, 1), 0)))
7281 {
7282 code = BIT_IOR_EXPR;
7283 goto bit_ior;
7284 }
7285
7286 /* See if this can be simplified into a rotate first. If that
7287 is unsuccessful continue in the association code. */
7288 goto bit_rotate;
7289
7290 case BIT_AND_EXPR:
7291 if (integer_all_onesp (arg1))
7292 return non_lvalue (fold_convert (type, arg0));
7293 if (integer_zerop (arg1))
7294 return omit_one_operand (type, arg1, arg0);
7295 if (operand_equal_p (arg0, arg1, 0))
7296 return non_lvalue (fold_convert (type, arg0));
7297
7298 /* ~X & X is always zero. */
7299 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7300 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7301 return omit_one_operand (type, integer_zero_node, arg1);
7302
7303 /* X & ~X is always zero. */
7304 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7305 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7306 return omit_one_operand (type, integer_zero_node, arg0);
7307
7308 t1 = distribute_bit_expr (code, type, arg0, arg1);
7309 if (t1 != NULL_TREE)
7310 return t1;
7311 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
7312 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
7313 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7314 {
7315 unsigned int prec
7316 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
7317
7318 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
7319 && (~TREE_INT_CST_LOW (arg1)
7320 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
7321 return fold_convert (type, TREE_OPERAND (arg0, 0));
7322 }
7323
7324 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
7325
7326 This results in more efficient code for machines without a NOR
7327 instruction. Combine will canonicalize to the first form
7328 which will allow use of NOR instructions provided by the
7329 backend if they exist. */
7330 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7331 && TREE_CODE (arg1) == BIT_NOT_EXPR)
7332 {
7333 return fold (build1 (BIT_NOT_EXPR, type,
7334 build2 (BIT_IOR_EXPR, type,
7335 TREE_OPERAND (arg0, 0),
7336 TREE_OPERAND (arg1, 0))));
7337 }
7338
7339 goto associate;
7340
7341 case RDIV_EXPR:
7342 /* Don't touch a floating-point divide by zero unless the mode
7343 of the constant can represent infinity. */
7344 if (TREE_CODE (arg1) == REAL_CST
7345 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
7346 && real_zerop (arg1))
7347 return t;
7348
7349 /* (-A) / (-B) -> A / B */
7350 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7351 return fold (build2 (RDIV_EXPR, type,
7352 TREE_OPERAND (arg0, 0),
7353 negate_expr (arg1)));
7354 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7355 return fold (build2 (RDIV_EXPR, type,
7356 negate_expr (arg0),
7357 TREE_OPERAND (arg1, 0)));
7358
7359 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
7360 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7361 && real_onep (arg1))
7362 return non_lvalue (fold_convert (type, arg0));
7363
7364 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
7365 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7366 && real_minus_onep (arg1))
7367 return non_lvalue (fold_convert (type, negate_expr (arg0)));
7368
7369 /* If ARG1 is a constant, we can convert this to a multiply by the
7370 reciprocal. This does not have the same rounding properties,
7371 so only do this if -funsafe-math-optimizations. We can actually
7372 always safely do it if ARG1 is a power of two, but it's hard to
7373 tell if it is or not in a portable manner. */
7374 if (TREE_CODE (arg1) == REAL_CST)
7375 {
7376 if (flag_unsafe_math_optimizations
7377 && 0 != (tem = const_binop (code, build_real (type, dconst1),
7378 arg1, 0)))
7379 return fold (build2 (MULT_EXPR, type, arg0, tem));
7380 /* Find the reciprocal if optimizing and the result is exact. */
7381 if (optimize)
7382 {
7383 REAL_VALUE_TYPE r;
7384 r = TREE_REAL_CST (arg1);
7385 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
7386 {
7387 tem = build_real (type, r);
7388 return fold (build2 (MULT_EXPR, type, arg0, tem));
7389 }
7390 }
7391 }
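	  /* Illustrative sketch: when optimizing, x / 2.0 becomes x * 0.5
	     even without -funsafe-math-optimizations, because 0.5 is the
	     exact binary reciprocal of 2.0; x / 3.0 is rewritten as a
	     multiply only under the unsafe-math flag.  */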
7392 /* Convert A/B/C to A/(B*C). */
7393 if (flag_unsafe_math_optimizations
7394 && TREE_CODE (arg0) == RDIV_EXPR)
7395 return fold (build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
7396 fold (build2 (MULT_EXPR, type,
7397 TREE_OPERAND (arg0, 1), arg1))));
7398
7399 /* Convert A/(B/C) to (A/B)*C. */
7400 if (flag_unsafe_math_optimizations
7401 && TREE_CODE (arg1) == RDIV_EXPR)
7402 return fold (build2 (MULT_EXPR, type,
7403 fold (build2 (RDIV_EXPR, type, arg0,
7404 TREE_OPERAND (arg1, 0))),
7405 TREE_OPERAND (arg1, 1)));
7406
7407 /* Convert C1/(X*C2) into (C1/C2)/X. */
7408 if (flag_unsafe_math_optimizations
7409 && TREE_CODE (arg1) == MULT_EXPR
7410 && TREE_CODE (arg0) == REAL_CST
7411 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
7412 {
7413 tree tem = const_binop (RDIV_EXPR, arg0,
7414 TREE_OPERAND (arg1, 1), 0);
7415 if (tem)
7416 return fold (build2 (RDIV_EXPR, type, tem,
7417 TREE_OPERAND (arg1, 0)));
7418 }
7419
7420 if (flag_unsafe_math_optimizations)
7421 {
7422 enum built_in_function fcode = builtin_mathfn_code (arg1);
7423 /* Optimize x/expN(y) into x*expN(-y). */
7424 if (BUILTIN_EXPONENT_P (fcode))
7425 {
7426 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7427 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
7428 tree arglist = build_tree_list (NULL_TREE,
7429 fold_convert (type, arg));
7430 arg1 = build_function_call_expr (expfn, arglist);
7431 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7432 }
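	      /* Illustrative sketch (unsafe-math only): x / exp (y) is
	         rewritten as x * exp (-y), trading the division for a
	         negation and a multiplication.  */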
7433
7434 /* Optimize x/pow(y,z) into x*pow(y,-z). */
7435 if (fcode == BUILT_IN_POW
7436 || fcode == BUILT_IN_POWF
7437 || fcode == BUILT_IN_POWL)
7438 {
7439 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7440 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7441 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
7442 tree neg11 = fold_convert (type, negate_expr (arg11));
7443 tree arglist = tree_cons (NULL_TREE, arg10,
7444 build_tree_list (NULL_TREE, neg11));
7445 arg1 = build_function_call_expr (powfn, arglist);
7446 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7447 }
7448 }
7449
7450 if (flag_unsafe_math_optimizations)
7451 {
7452 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7453 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7454
7455 /* Optimize sin(x)/cos(x) as tan(x). */
7456 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
7457 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
7458 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
7459 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7460 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7461 {
7462 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7463
7464 if (tanfn != NULL_TREE)
7465 return build_function_call_expr (tanfn,
7466 TREE_OPERAND (arg0, 1));
7467 }
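	      /* Illustrative sketch (unsafe-math only): sin (x) / cos (x)
	         collapses to tan (x) when both calls have the same
	         argument.  */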
7468
7469 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
7470 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
7471 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
7472 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
7473 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7474 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7475 {
7476 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7477
7478 if (tanfn != NULL_TREE)
7479 {
7480 tree tmp = TREE_OPERAND (arg0, 1);
7481 tmp = build_function_call_expr (tanfn, tmp);
7482 return fold (build2 (RDIV_EXPR, type,
7483 build_real (type, dconst1), tmp));
7484 }
7485 }
7486
7487 /* Optimize pow(x,c)/x as pow(x,c-1). */
7488 if (fcode0 == BUILT_IN_POW
7489 || fcode0 == BUILT_IN_POWF
7490 || fcode0 == BUILT_IN_POWL)
7491 {
7492 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7493 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
7494 if (TREE_CODE (arg01) == REAL_CST
7495 && ! TREE_CONSTANT_OVERFLOW (arg01)
7496 && operand_equal_p (arg1, arg00, 0))
7497 {
7498 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7499 REAL_VALUE_TYPE c;
7500 tree arg, arglist;
7501
7502 c = TREE_REAL_CST (arg01);
7503 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
7504 arg = build_real (type, c);
7505 arglist = build_tree_list (NULL_TREE, arg);
7506 arglist = tree_cons (NULL_TREE, arg1, arglist);
7507 return build_function_call_expr (powfn, arglist);
7508 }
7509 }
7510 }
7511 goto binary;
7512
7513 case TRUNC_DIV_EXPR:
7514 case ROUND_DIV_EXPR:
7515 case FLOOR_DIV_EXPR:
7516 case CEIL_DIV_EXPR:
7517 case EXACT_DIV_EXPR:
7518 if (integer_onep (arg1))
7519 return non_lvalue (fold_convert (type, arg0));
7520 if (integer_zerop (arg1))
7521 return t;
7522 /* X / -1 is -X. */
7523 if (!TYPE_UNSIGNED (type)
7524 && TREE_CODE (arg1) == INTEGER_CST
7525 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7526 && TREE_INT_CST_HIGH (arg1) == -1)
7527 return fold_convert (type, negate_expr (arg0));
7528
7529 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
7530 operation, EXACT_DIV_EXPR.
7531
7532 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
7533 At one time others generated faster code, but it's not clear whether they
7534 still do after the last round of changes to the DIV code in expmed.c. */
7535 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
7536 && multiple_of_p (type, arg0, arg1))
7537 return fold (build2 (EXACT_DIV_EXPR, type, arg0, arg1));
7538
7539 if (TREE_CODE (arg1) == INTEGER_CST
7540 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7541 code, NULL_TREE)))
7542 return fold_convert (type, tem);
7543
7544 goto binary;
7545
7546 case CEIL_MOD_EXPR:
7547 case FLOOR_MOD_EXPR:
7548 case ROUND_MOD_EXPR:
7549 case TRUNC_MOD_EXPR:
7550 if (integer_onep (arg1))
7551 return omit_one_operand (type, integer_zero_node, arg0);
7552 if (integer_zerop (arg1))
7553 return t;
7554
7555 /* X % -1 is zero. */
7556 if (!TYPE_UNSIGNED (type)
7557 && TREE_CODE (arg1) == INTEGER_CST
7558 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7559 && TREE_INT_CST_HIGH (arg1) == -1)
7560 return omit_one_operand (type, integer_zero_node, arg0);
7561
7562 /* Optimize unsigned TRUNC_MOD_EXPR by a power of two into a
7563 BIT_AND_EXPR, i.e. "X % C" into "X & C2". */
7564 if (code == TRUNC_MOD_EXPR
7565 && TYPE_UNSIGNED (type)
7566 && integer_pow2p (arg1))
7567 {
7568 unsigned HOST_WIDE_INT high, low;
7569 tree mask;
7570 int l;
7571
7572 l = tree_log2 (arg1);
7573 if (l >= HOST_BITS_PER_WIDE_INT)
7574 {
7575 high = ((unsigned HOST_WIDE_INT) 1
7576 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
7577 low = -1;
7578 }
7579 else
7580 {
7581 high = 0;
7582 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
7583 }
7584
7585 mask = build_int_2 (low, high);
7586 TREE_TYPE (mask) = type;
7587 return fold (build2 (BIT_AND_EXPR, type,
7588 fold_convert (type, arg0), mask));
7589 }
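	  /* Illustrative sketch: for unsigned x, x % 8 is rewritten as
	     x & 7, since l = tree_log2 (8) = 3 and the mask built above
	     is (1 << 3) - 1.  */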
7590
7591 /* X % -C is the same as X % C (for all rounding moduli). */
7592 if (!TYPE_UNSIGNED (type)
7593 && TREE_CODE (arg1) == INTEGER_CST
7594 && TREE_INT_CST_HIGH (arg1) < 0
7595 && !flag_trapv
7596 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
7597 && !sign_bit_p (arg1, arg1))
7598 return fold (build2 (code, type, fold_convert (type, arg0),
7599 fold_convert (type, negate_expr (arg1))));
7600
7601 /* X % -Y is the same as X % Y (for all rounding moduli). */
7602 if (!TYPE_UNSIGNED (type)
7603 && TREE_CODE (arg1) == NEGATE_EXPR
7604 && !flag_trapv)
7605 return fold (build2 (code, type, fold_convert (type, arg0),
7606 fold_convert (type, TREE_OPERAND (arg1, 0))));
7607
7608 if (TREE_CODE (arg1) == INTEGER_CST
7609 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7610 code, NULL_TREE)))
7611 return fold_convert (type, tem);
7612
7613 goto binary;
7614
7615 case LROTATE_EXPR:
7616 case RROTATE_EXPR:
7617 if (integer_all_onesp (arg0))
7618 return omit_one_operand (type, arg0, arg1);
7619 goto shift;
7620
7621 case RSHIFT_EXPR:
7622 /* Optimize -1 >> x for arithmetic right shifts. */
7623 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
7624 return omit_one_operand (type, arg0, arg1);
7625 /* ... fall through ... */
7626
7627 case LSHIFT_EXPR:
7628 shift:
7629 if (integer_zerop (arg1))
7630 return non_lvalue (fold_convert (type, arg0));
7631 if (integer_zerop (arg0))
7632 return omit_one_operand (type, arg0, arg1);
7633
7634 /* Since negative shift count is not well-defined,
7635 don't try to compute it in the compiler. */
7636 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
7637 return t;
7638 /* Rewrite an LROTATE_EXPR by a constant into an
7639 RROTATE_EXPR by a new constant. */
7640 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
7641 {
7642 tree tem = build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0);
7643 tem = fold_convert (TREE_TYPE (arg1), tem);
7644 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
7645 return fold (build2 (RROTATE_EXPR, type, arg0, tem));
7646 }
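	  /* Illustrative sketch (assuming a 32-bit mode): a rotate-left
	     by 8 is canonicalized as a rotate-right by 32 - 8 = 24, so
	     later code only has to handle one rotate direction.  */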
7647
7648 /* If we have a rotate of a bit operation with the rotate count and
7649 the second operand of the bit operation both constant,
7650 permute the two operations. */
7651 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7652 && (TREE_CODE (arg0) == BIT_AND_EXPR
7653 || TREE_CODE (arg0) == BIT_IOR_EXPR
7654 || TREE_CODE (arg0) == BIT_XOR_EXPR)
7655 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7656 return fold (build2 (TREE_CODE (arg0), type,
7657 fold (build2 (code, type,
7658 TREE_OPERAND (arg0, 0), arg1)),
7659 fold (build2 (code, type,
7660 TREE_OPERAND (arg0, 1), arg1))));
7661
7662 /* Two consecutive rotates adding up to the width of the mode can
7663 be ignored. */
7664 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7665 && TREE_CODE (arg0) == RROTATE_EXPR
7666 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7667 && TREE_INT_CST_HIGH (arg1) == 0
7668 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
7669 && ((TREE_INT_CST_LOW (arg1)
7670 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
7671 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
7672 return TREE_OPERAND (arg0, 0);
7673
7674 goto binary;
7675
7676 case MIN_EXPR:
7677 if (operand_equal_p (arg0, arg1, 0))
7678 return omit_one_operand (type, arg0, arg1);
7679 if (INTEGRAL_TYPE_P (type)
7680 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
7681 return omit_one_operand (type, arg1, arg0);
7682 goto associate;
7683
7684 case MAX_EXPR:
7685 if (operand_equal_p (arg0, arg1, 0))
7686 return omit_one_operand (type, arg0, arg1);
7687 if (INTEGRAL_TYPE_P (type)
7688 && TYPE_MAX_VALUE (type)
7689 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
7690 return omit_one_operand (type, arg1, arg0);
7691 goto associate;
7692
7693 case TRUTH_NOT_EXPR:
7694 /* The argument to invert_truthvalue must have Boolean type. */
7695 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7696 arg0 = fold_convert (boolean_type_node, arg0);
7697
7698 /* Note that the operand of this must be an int
7699 and its values must be 0 or 1.
7700 ("true" is a fixed value perhaps depending on the language,
7701 but we don't handle values other than 1 correctly yet.) */
7702 tem = invert_truthvalue (arg0);
7703 /* Avoid infinite recursion. */
7704 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7705 {
7706 tem = fold_single_bit_test (code, arg0, arg1, type);
7707 if (tem)
7708 return tem;
7709 return t;
7710 }
7711 return fold_convert (type, tem);
7712
7713 case TRUTH_ANDIF_EXPR:
7714 /* Note that the operands of this must be ints
7715 and their values must be 0 or 1.
7716 ("true" is a fixed value perhaps depending on the language.) */
7717 /* If first arg is constant zero, return it. */
7718 if (integer_zerop (arg0))
7719 return fold_convert (type, arg0);
7720 case TRUTH_AND_EXPR:
7721 /* If either arg is constant true, drop it. */
7722 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7723 return non_lvalue (fold_convert (type, arg1));
7724 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
7725 /* Preserve sequence points. */
7726 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7727 return non_lvalue (fold_convert (type, arg0));
7728 /* If second arg is constant zero, result is zero, but first arg
7729 must be evaluated. */
7730 if (integer_zerop (arg1))
7731 return omit_one_operand (type, arg1, arg0);
7732 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
7733 case will be handled here. */
7734 if (integer_zerop (arg0))
7735 return omit_one_operand (type, arg0, arg1);
7736
7737 /* !X && X is always false. */
7738 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
7739 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7740 return omit_one_operand (type, integer_zero_node, arg1);
7741 /* X && !X is always false. */
7742 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
7743 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7744 return omit_one_operand (type, integer_zero_node, arg0);
7745
7746 truth_andor:
7747 /* We only do these simplifications if we are optimizing. */
7748 if (!optimize)
7749 return t;
7750
7751 /* Check for things like (A || B) && (A || C). We can convert this
7752 to A || (B && C). Note that either operator can be any of the four
7753 truth and/or operations and the transformation will still be
7754 valid. Also note that we only care about order for the
7755 ANDIF and ORIF operators. If B contains side effects, this
7756 might change the truth-value of A. */
7757 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7758 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7759 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7760 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7761 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7762 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7763 {
7764 tree a00 = TREE_OPERAND (arg0, 0);
7765 tree a01 = TREE_OPERAND (arg0, 1);
7766 tree a10 = TREE_OPERAND (arg1, 0);
7767 tree a11 = TREE_OPERAND (arg1, 1);
7768 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7769 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7770 && (code == TRUTH_AND_EXPR
7771 || code == TRUTH_OR_EXPR));
7772
7773 if (operand_equal_p (a00, a10, 0))
7774 return fold (build2 (TREE_CODE (arg0), type, a00,
7775 fold (build2 (code, type, a01, a11))));
7776 else if (commutative && operand_equal_p (a00, a11, 0))
7777 return fold (build2 (TREE_CODE (arg0), type, a00,
7778 fold (build2 (code, type, a01, a10))));
7779 else if (commutative && operand_equal_p (a01, a10, 0))
7780 return fold (build2 (TREE_CODE (arg0), type, a01,
7781 fold (build2 (code, type, a00, a11))));
7782
7783 /* This case is tricky because we must either have commutative
7784 operators or else A10 must not have side-effects. */
7785
7786 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
7787 && operand_equal_p (a01, a11, 0))
7788 return fold (build2 (TREE_CODE (arg0), type,
7789 fold (build2 (code, type, a00, a10)),
7790 a01));
7791 }
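	  /* Illustrative sketch: (a || b) && (a || c) is refactored into
	     a || (b && c), so the shared operand a is tested only once;
	     the commutative variants above let a match on either side
	     when the operators are TRUTH_AND_EXPR or TRUTH_OR_EXPR.  */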
7792
7793 /* See if we can build a range comparison. */
7794 if (0 != (tem = fold_range_test (t)))
7795 return tem;
7796
7797 /* Check for the possibility of merging component references. If our
7798 lhs is another similar operation, try to merge its rhs with our
7799 rhs. Then try to merge our lhs and rhs. */
7800 if (TREE_CODE (arg0) == code
7801 && 0 != (tem = fold_truthop (code, type,
7802 TREE_OPERAND (arg0, 1), arg1)))
7803 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
7804
7805 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
7806 return tem;
7807
7808 return t;
7809
7810 case TRUTH_ORIF_EXPR:
7811 /* Note that the operands of this must be ints
7812 and their values must be 0 or true.
7813 ("true" is a fixed value perhaps depending on the language.) */
7814 /* If first arg is constant true, return it. */
7815 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7816 return fold_convert (type, arg0);
7817 case TRUTH_OR_EXPR:
7818 /* If either arg is constant zero, drop it. */
7819 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
7820 return non_lvalue (fold_convert (type, arg1));
7821 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
7822 /* Preserve sequence points. */
7823 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7824 return non_lvalue (fold_convert (type, arg0));
7825 /* If second arg is constant true, result is true, but we must
7826 evaluate first arg. */
7827 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
7828 return omit_one_operand (type, arg1, arg0);
7829 /* Likewise for first arg, but note this only occurs here for
7830 TRUTH_OR_EXPR. */
7831 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7832 return omit_one_operand (type, arg0, arg1);
7833
7834 /* !X || X is always true. */
7835 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
7836 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7837 return omit_one_operand (type, integer_one_node, arg1);
7838 /* X || !X is always true. */
7839 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
7840 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7841 return omit_one_operand (type, integer_one_node, arg0);
7842
7843 goto truth_andor;
7844
7845 case TRUTH_XOR_EXPR:
7846 /* If the second arg is constant zero, drop it. */
7847 if (integer_zerop (arg1))
7848 return non_lvalue (fold_convert (type, arg0));
7849 /* If the second arg is constant true, this is a logical inversion. */
7850 if (integer_onep (arg1))
7851 return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
7852 /* Identical arguments cancel to zero. */
7853 if (operand_equal_p (arg0, arg1, 0))
7854 return omit_one_operand (type, integer_zero_node, arg0);
7855
7856 /* !X ^ X is always true. */
7857 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
7858 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7859 return omit_one_operand (type, integer_one_node, arg1);
7860
7861 /* X ^ !X is always true. */
7862 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
7863 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7864 return omit_one_operand (type, integer_one_node, arg0);
7865
7866 return t;
7867
7868 case EQ_EXPR:
7869 case NE_EXPR:
7870 case LT_EXPR:
7871 case GT_EXPR:
7872 case LE_EXPR:
7873 case GE_EXPR:
7874 /* If one arg is a real or integer constant, put it last. */
7875 if (tree_swap_operands_p (arg0, arg1, true))
7876 return fold (build2 (swap_tree_comparison (code), type, arg1, arg0));
7877
7878 /* If this is an equality comparison of the address of a non-weak
7879 object against zero, then we know the result. */
7880 if ((code == EQ_EXPR || code == NE_EXPR)
7881 && TREE_CODE (arg0) == ADDR_EXPR
7882 && DECL_P (TREE_OPERAND (arg0, 0))
7883 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
7884 && integer_zerop (arg1))
7885 return constant_boolean_node (code != EQ_EXPR, type);
7886
7887 /* If this is an equality comparison of the address of two non-weak,
7888 unaliased symbols neither of which are extern (since we do not
7889 have access to attributes for externs), then we know the result. */
7890 if ((code == EQ_EXPR || code == NE_EXPR)
7891 && TREE_CODE (arg0) == ADDR_EXPR
7892 && DECL_P (TREE_OPERAND (arg0, 0))
7893 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
7894 && ! lookup_attribute ("alias",
7895 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
7896 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
7897 && TREE_CODE (arg1) == ADDR_EXPR
7898 && DECL_P (TREE_OPERAND (arg1, 0))
7899 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
7900 && ! lookup_attribute ("alias",
7901 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
7902 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
7903 return constant_boolean_node (operand_equal_p (arg0, arg1, 0)
7904 ? code == EQ_EXPR : code != EQ_EXPR,
7905 type);
7906
7907 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
7908 {
7909 tree targ0 = strip_float_extensions (arg0);
7910 tree targ1 = strip_float_extensions (arg1);
7911 tree newtype = TREE_TYPE (targ0);
7912
7913 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
7914 newtype = TREE_TYPE (targ1);
7915
7916 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
7917 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
7918 return fold (build2 (code, type, fold_convert (newtype, targ0),
7919 fold_convert (newtype, targ1)));
7920
7921 /* (-a) CMP (-b) -> b CMP a */
7922 if (TREE_CODE (arg0) == NEGATE_EXPR
7923 && TREE_CODE (arg1) == NEGATE_EXPR)
7924 return fold (build2 (code, type, TREE_OPERAND (arg1, 0),
7925 TREE_OPERAND (arg0, 0)));
7926
7927 if (TREE_CODE (arg1) == REAL_CST)
7928 {
7929 REAL_VALUE_TYPE cst;
7930 cst = TREE_REAL_CST (arg1);
7931
7932 /* (-a) CMP CST -> a swap(CMP) (-CST) */
7933 if (TREE_CODE (arg0) == NEGATE_EXPR)
7934 return
7935 fold (build2 (swap_tree_comparison (code), type,
7936 TREE_OPERAND (arg0, 0),
7937 build_real (TREE_TYPE (arg1),
7938 REAL_VALUE_NEGATE (cst))));
7939
7940 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
7941 /* a CMP (-0) -> a CMP 0 */
7942 if (REAL_VALUE_MINUS_ZERO (cst))
7943 return fold (build2 (code, type, arg0,
7944 build_real (TREE_TYPE (arg1), dconst0)));
7945
7946 /* x != NaN is always true, other ops are always false. */
7947 if (REAL_VALUE_ISNAN (cst)
7948 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
7949 {
7950 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
7951 return omit_one_operand (type, tem, arg0);
7952 }
7953
7954 /* Fold comparisons against infinity. */
7955 if (REAL_VALUE_ISINF (cst))
7956 {
7957 tem = fold_inf_compare (code, type, arg0, arg1);
7958 if (tem != NULL_TREE)
7959 return tem;
7960 }
7961 }
7962
7963 /* If this is a comparison of a real constant with a PLUS_EXPR
7964 or a MINUS_EXPR of a real constant, we can convert it into a
7965 comparison with a revised real constant as long as no overflow
7966 occurs when unsafe_math_optimizations are enabled. */
7967 if (flag_unsafe_math_optimizations
7968 && TREE_CODE (arg1) == REAL_CST
7969 && (TREE_CODE (arg0) == PLUS_EXPR
7970 || TREE_CODE (arg0) == MINUS_EXPR)
7971 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7972 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7973 ? MINUS_EXPR : PLUS_EXPR,
7974 arg1, TREE_OPERAND (arg0, 1), 0))
7975 && ! TREE_CONSTANT_OVERFLOW (tem))
7976 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
7977
7978 /* Likewise, we can simplify a comparison of a real constant with
7979 a MINUS_EXPR whose first operand is also a real constant, i.e.
7980 (c1 - x) < c2 becomes x > c1-c2. */
7981 if (flag_unsafe_math_optimizations
7982 && TREE_CODE (arg1) == REAL_CST
7983 && TREE_CODE (arg0) == MINUS_EXPR
7984 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
7985 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
7986 arg1, 0))
7987 && ! TREE_CONSTANT_OVERFLOW (tem))
7988 return fold (build2 (swap_tree_comparison (code), type,
7989 TREE_OPERAND (arg0, 1), tem));
7990
7991 /* Fold comparisons against built-in math functions. */
7992 if (TREE_CODE (arg1) == REAL_CST
7993 && flag_unsafe_math_optimizations
7994 && ! flag_errno_math)
7995 {
7996 enum built_in_function fcode = builtin_mathfn_code (arg0);
7997
7998 if (fcode != END_BUILTINS)
7999 {
8000 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8001 if (tem != NULL_TREE)
8002 return tem;
8003 }
8004 }
8005 }
8006
8007 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8008 if (TREE_CONSTANT (arg1)
8009 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8010 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8011 /* This optimization is invalid for ordered comparisons
8012 if CONST+INCR overflows or if foo+incr might overflow.
8013 This optimization is invalid for floating point due to rounding.
8014 For pointer types we assume overflow doesn't happen. */
8015 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8016 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8017 && (code == EQ_EXPR || code == NE_EXPR))))
8018 {
8019 tree varop, newconst;
8020
8021 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8022 {
8023 newconst = fold (build2 (PLUS_EXPR, TREE_TYPE (arg0),
8024 arg1, TREE_OPERAND (arg0, 1)));
8025 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8026 TREE_OPERAND (arg0, 0),
8027 TREE_OPERAND (arg0, 1));
8028 }
8029 else
8030 {
8031 newconst = fold (build2 (MINUS_EXPR, TREE_TYPE (arg0),
8032 arg1, TREE_OPERAND (arg0, 1)));
8033 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8034 TREE_OPERAND (arg0, 0),
8035 TREE_OPERAND (arg0, 1));
8036 }
8037
8038
8039 /* If VAROP is a reference to a bitfield, we must mask
8040 the constant by the width of the field. */
8041 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8042 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8043 && host_integerp (DECL_SIZE (TREE_OPERAND
8044 (TREE_OPERAND (varop, 0), 1)), 1))
8045 {
8046 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8047 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8048 tree folded_compare, shift;
8049
8050 /* First check whether the comparison would come out
8051 always the same. If we don't do that, we would
8052 change the meaning with the masking. */
8053 folded_compare = fold (build2 (code, type,
8054 TREE_OPERAND (varop, 0), arg1));
8055 if (integer_zerop (folded_compare)
8056 || integer_onep (folded_compare))
8057 return omit_one_operand (type, folded_compare, varop);
8058
8059 shift = build_int_2 (TYPE_PRECISION (TREE_TYPE (varop)) - size,
8060 0);
8061 shift = fold_convert (TREE_TYPE (varop), shift);
8062 newconst = fold (build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8063 newconst, shift));
8064 newconst = fold (build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8065 newconst, shift));
8066 }
8067
8068 return fold (build2 (code, type, varop, newconst));
8069 }
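	  /* Illustrative sketch: a comparison such as i++ == 5 is
	     rewritten as ++i == 6, folding the increment into the
	     constant; this is restricted to EQ/NE on integers and to
	     pointers, where overflow is assumed not to happen.  */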
8070
8071 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
8072 This transformation affects the cases which are handled in later
8073 optimizations involving comparisons with non-negative constants. */
8074 if (TREE_CODE (arg1) == INTEGER_CST
8075 && TREE_CODE (arg0) != INTEGER_CST
8076 && tree_int_cst_sgn (arg1) > 0)
8077 {
8078 switch (code)
8079 {
8080 case GE_EXPR:
8081 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8082 return fold (build2 (GT_EXPR, type, arg0, arg1));
8083
8084 case LT_EXPR:
8085 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8086 return fold (build2 (LE_EXPR, type, arg0, arg1));
8087
8088 default:
8089 break;
8090 }
8091 }
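	  /* Illustrative sketch: for a positive constant, x >= 5 becomes
	     x > 4 and x < 5 becomes x <= 4, normalizing toward the forms
	     the later range optimizations expect.  */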
8092
8093 /* Comparisons with the highest or lowest possible integer of
8094 the specified size will have known values.
8095
8096 This is quite similar to fold_relational_hi_lo; however, my
8097 attempts to share the code have been nothing but trouble.
8098 I give up for now. */
8099 {
8100 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
8101
8102 if (TREE_CODE (arg1) == INTEGER_CST
8103 && ! TREE_CONSTANT_OVERFLOW (arg1)
8104 && width <= HOST_BITS_PER_WIDE_INT
8105 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
8106 || POINTER_TYPE_P (TREE_TYPE (arg1))))
8107 {
8108 unsigned HOST_WIDE_INT signed_max;
8109 unsigned HOST_WIDE_INT max, min;
8110
8111 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
8112
8113 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
8114 {
8115 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
8116 min = 0;
8117 }
8118 else
8119 {
8120 max = signed_max;
8121 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
8122 }
8123
8124 if (TREE_INT_CST_HIGH (arg1) == 0
8125 && TREE_INT_CST_LOW (arg1) == max)
8126 switch (code)
8127 {
8128 case GT_EXPR:
8129 return omit_one_operand (type, integer_zero_node, arg0);
8130
8131 case GE_EXPR:
8132 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8133
8134 case LE_EXPR:
8135 return omit_one_operand (type, integer_one_node, arg0);
8136
8137 case LT_EXPR:
8138 return fold (build2 (NE_EXPR, type, arg0, arg1));
8139
8140 /* The GE_EXPR and LT_EXPR cases above are not normally
8141 reached because of previous transformations. */
8142
8143 default:
8144 break;
8145 }
8146 else if (TREE_INT_CST_HIGH (arg1) == 0
8147 && TREE_INT_CST_LOW (arg1) == max - 1)
8148 switch (code)
8149 {
8150 case GT_EXPR:
8151 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
8152 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8153 case LE_EXPR:
8154 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
8155 return fold (build2 (NE_EXPR, type, arg0, arg1));
8156 default:
8157 break;
8158 }
8159 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
8160 && TREE_INT_CST_LOW (arg1) == min)
8161 switch (code)
8162 {
8163 case LT_EXPR:
8164 return omit_one_operand (type, integer_zero_node, arg0);
8165
8166 case LE_EXPR:
8167 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8168
8169 case GE_EXPR:
8170 return omit_one_operand (type, integer_one_node, arg0);
8171
8172 case GT_EXPR:
8173 return fold (build2 (NE_EXPR, type, arg0, arg1));
8174
8175 default:
8176 break;
8177 }
8178 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
8179 && TREE_INT_CST_LOW (arg1) == min + 1)
8180 switch (code)
8181 {
8182 case GE_EXPR:
8183 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8184 return fold (build2 (NE_EXPR, type, arg0, arg1));
8185 case LT_EXPR:
8186 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8187 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8188 default:
8189 break;
8190 }
8191
8192 else if (!in_gimple_form
8193 && TREE_INT_CST_HIGH (arg1) == 0
8194 && TREE_INT_CST_LOW (arg1) == signed_max
8195 && TYPE_UNSIGNED (TREE_TYPE (arg1))
8196 /* signed_type does not work on pointer types. */
8197 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
8198 {
8199 /* The following case also applies to X < signed_max+1
8200 and X >= signed_max+1 because of the previous transformations. */
8201 if (code == LE_EXPR || code == GT_EXPR)
8202 {
8203 tree st0, st1;
8204 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
8205 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
8206 return fold
8207 (build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
8208 type, fold_convert (st0, arg0),
8209 fold_convert (st1, integer_zero_node)));
8210 }
8211 }
8212 }
8213 }
8214
8215 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
8216 a MINUS_EXPR of a constant, we can convert it into a comparison with
8217 a revised constant as long as no overflow occurs. */
8218 if ((code == EQ_EXPR || code == NE_EXPR)
8219 && TREE_CODE (arg1) == INTEGER_CST
8220 && (TREE_CODE (arg0) == PLUS_EXPR
8221 || TREE_CODE (arg0) == MINUS_EXPR)
8222 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8223 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8224 ? MINUS_EXPR : PLUS_EXPR,
8225 arg1, TREE_OPERAND (arg0, 1), 0))
8226 && ! TREE_CONSTANT_OVERFLOW (tem))
8227 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8228
8229 /* Similarly for a NEGATE_EXPR. */
8230 else if ((code == EQ_EXPR || code == NE_EXPR)
8231 && TREE_CODE (arg0) == NEGATE_EXPR
8232 && TREE_CODE (arg1) == INTEGER_CST
8233 && 0 != (tem = negate_expr (arg1))
8234 && TREE_CODE (tem) == INTEGER_CST
8235 && ! TREE_CONSTANT_OVERFLOW (tem))
8236 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8237
8238 /* If we have X - Y == 0, we can convert that to X == Y and similarly
8239 for !=. Don't do this for ordered comparisons due to overflow. */
8240 else if ((code == NE_EXPR || code == EQ_EXPR)
8241 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
8242 return fold (build2 (code, type,
8243 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
8244
8245 /* If we are widening one operand of an integer comparison,
8246 see if the other operand is similarly being widened. Perhaps we
8247 can do the comparison in the narrower type. */
8248 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8249 && TREE_CODE (arg0) == NOP_EXPR
8250 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
8251 && (code == EQ_EXPR || code == NE_EXPR
8252 || TYPE_UNSIGNED (TREE_TYPE (arg0))
8253 == TYPE_UNSIGNED (TREE_TYPE (tem)))
8254 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
8255 && (TREE_TYPE (t1) == TREE_TYPE (tem)
8256 || (TREE_CODE (t1) == INTEGER_CST
8257 && int_fits_type_p (t1, TREE_TYPE (tem)))))
8258 return fold (build2 (code, type, tem,
8259 fold_convert (TREE_TYPE (tem), t1)));
8260
8261 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8262 constant, we can simplify it. */
8263 else if (TREE_CODE (arg1) == INTEGER_CST
8264 && (TREE_CODE (arg0) == MIN_EXPR
8265 || TREE_CODE (arg0) == MAX_EXPR)
8266 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8267 return optimize_minmax_comparison (t);
8268
8269 /* If we are comparing an ABS_EXPR with a constant, we can
8270 convert all the cases into explicit comparisons, but they may
8271 well not be faster than doing the ABS and one comparison.
8272 But ABS (X) <= C is a range comparison, which becomes a subtraction
8273 and a comparison, and is probably faster. */
8274 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8275 && TREE_CODE (arg0) == ABS_EXPR
8276 && ! TREE_SIDE_EFFECTS (arg0)
8277 && (0 != (tem = negate_expr (arg1)))
8278 && TREE_CODE (tem) == INTEGER_CST
8279 && ! TREE_CONSTANT_OVERFLOW (tem))
8280 return fold (build2 (TRUTH_ANDIF_EXPR, type,
8281 build2 (GE_EXPR, type,
8282 TREE_OPERAND (arg0, 0), tem),
8283 build2 (LE_EXPR, type,
8284 TREE_OPERAND (arg0, 0), arg1)));
8285
8286 /* If this is an EQ or NE comparison with zero and ARG0 is
8287 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
8288 two operations, but the latter can be done in one less insn
8289 on machines that have only two-operand insns or on which a
8290 constant cannot be the first operand. */
8291 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
8292 && TREE_CODE (arg0) == BIT_AND_EXPR)
8293 {
8294 tree arg00 = TREE_OPERAND (arg0, 0);
8295 tree arg01 = TREE_OPERAND (arg0, 1);
8296 if (TREE_CODE (arg00) == LSHIFT_EXPR
8297 && integer_onep (TREE_OPERAND (arg00, 0)))
8298 return
8299 fold (build2 (code, type,
8300 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8301 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
8302 arg01, TREE_OPERAND (arg00, 1)),
8303 fold_convert (TREE_TYPE (arg0),
8304 integer_one_node)),
8305 arg1));
8306 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
8307 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
8308 return
8309 fold (build2 (code, type,
8310 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8311 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
8312 arg00, TREE_OPERAND (arg01, 1)),
8313 fold_convert (TREE_TYPE (arg0),
8314 integer_one_node)),
8315 arg1));
8316 }
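	  /* Illustrative sketch: ((1 << n) & flags) == 0 is rewritten as
	     ((flags >> n) & 1) == 0, avoiding the materialization of the
	     shifted constant on two-operand machines.  */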
8317
8318 /* If this is an NE or EQ comparison of zero against the result of a
8319 signed MOD operation whose second operand is a power of 2, make
8320 the MOD operation unsigned since it is simpler and equivalent. */
8321 if ((code == NE_EXPR || code == EQ_EXPR)
8322 && integer_zerop (arg1)
8323 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
8324 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
8325 || TREE_CODE (arg0) == CEIL_MOD_EXPR
8326 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
8327 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
8328 && integer_pow2p (TREE_OPERAND (arg0, 1)))
8329 {
8330 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
8331 tree newmod = fold (build2 (TREE_CODE (arg0), newtype,
8332 fold_convert (newtype,
8333 TREE_OPERAND (arg0, 0)),
8334 fold_convert (newtype,
8335 TREE_OPERAND (arg0, 1))));
8336
8337 return fold (build2 (code, type, newmod,
8338 fold_convert (newtype, arg1)));
8339 }
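	  /* Illustrative sketch: for signed x, (x % 4) == 0 holds exactly
	     when ((unsigned) x % 4U) == 0, so the MOD is made unsigned,
	     which expands more cheaply for a power-of-2 modulus.  */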
8340
8341 /* If this is an NE comparison of zero with an AND of one, remove the
8342 comparison since the AND will give the correct value. */
8343 if (code == NE_EXPR && integer_zerop (arg1)
8344 && TREE_CODE (arg0) == BIT_AND_EXPR
8345 && integer_onep (TREE_OPERAND (arg0, 1)))
8346 return fold_convert (type, arg0);
8347
8348 /* If we have (A & C) == C where C is a power of 2, convert this into
8349 (A & C) != 0. Similarly for NE_EXPR. */
8350 if ((code == EQ_EXPR || code == NE_EXPR)
8351 && TREE_CODE (arg0) == BIT_AND_EXPR
8352 && integer_pow2p (TREE_OPERAND (arg0, 1))
8353 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
8354 return fold (build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
8355 arg0, integer_zero_node));
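	  /* Illustrative sketch: with C a power of two, (x & 8) == 8 is
	     equivalent to (x & 8) != 0; the latter form feeds the
	     single-bit-test folding just below.  */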
8356
8357 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
8358 2, then fold the expression into shifts and logical operations. */
8359 tem = fold_single_bit_test (code, arg0, arg1, type);
8360 if (tem)
8361 return tem;
8362
8363 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
8364 Similarly for NE_EXPR. */
8365 if ((code == EQ_EXPR || code == NE_EXPR)
8366 && TREE_CODE (arg0) == BIT_AND_EXPR
8367 && TREE_CODE (arg1) == INTEGER_CST
8368 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8369 {
8370 tree dandnotc
8371 = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8372 arg1, build1 (BIT_NOT_EXPR,
8373 TREE_TYPE (TREE_OPERAND (arg0, 1)),
8374 TREE_OPERAND (arg0, 1))));
8375 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
8376 if (integer_nonzerop (dandnotc))
8377 return omit_one_operand (type, rslt, arg0);
8378 }
8379
8380 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
8381 Similarly for NE_EXPR. */
8382 if ((code == EQ_EXPR || code == NE_EXPR)
8383 && TREE_CODE (arg0) == BIT_IOR_EXPR
8384 && TREE_CODE (arg1) == INTEGER_CST
8385 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8386 {
8387 tree candnotd
8388 = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8389 TREE_OPERAND (arg0, 1),
8390 build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1)));
8391 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
8392 if (integer_nonzerop (candnotd))
8393 return omit_one_operand (type, rslt, arg0);
8394 }
8395
8396 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
8397 and similarly for >= into !=. */
8398 if ((code == LT_EXPR || code == GE_EXPR)
8399 && TYPE_UNSIGNED (TREE_TYPE (arg0))
8400 && TREE_CODE (arg1) == LSHIFT_EXPR
8401 && integer_onep (TREE_OPERAND (arg1, 0)))
8402 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
8403 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
8404 TREE_OPERAND (arg1, 1)),
8405 fold_convert (TREE_TYPE (arg0), integer_zero_node));
8406
8407 else if ((code == LT_EXPR || code == GE_EXPR)
8408 && TYPE_UNSIGNED (TREE_TYPE (arg0))
8409 && (TREE_CODE (arg1) == NOP_EXPR
8410 || TREE_CODE (arg1) == CONVERT_EXPR)
8411 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
8412 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
8413 return
8414 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
8415 fold_convert (TREE_TYPE (arg0),
8416 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
8417 TREE_OPERAND (TREE_OPERAND (arg1, 0),
8418 1))),
8419 fold_convert (TREE_TYPE (arg0), integer_zero_node));
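      /* E.g., for unsigned x, "x < (1 << y)" becomes "(x >> y) == 0" and
	 "x >= (1 << y)" becomes "(x >> y) != 0", assuming y is within the
	 width of the type.  */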
8420
8421 /* Simplify comparison of something with itself. (For IEEE
8422 floating-point, we can only do some of these simplifications.) */
8423 if (operand_equal_p (arg0, arg1, 0))
8424 {
8425 switch (code)
8426 {
8427 case EQ_EXPR:
8428 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8429 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8430 return constant_boolean_node (1, type);
8431 break;
8432
8433 case GE_EXPR:
8434 case LE_EXPR:
8435 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8436 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8437 return constant_boolean_node (1, type);
8438 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8439
8440 case NE_EXPR:
8441 	      /* For NE, we can only do this simplification if the operands
8442 		 are integral or we don't honor IEEE floating-point NaNs.  */
8443 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8444 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8445 break;
8446 /* ... fall through ... */
8447 case GT_EXPR:
8448 case LT_EXPR:
8449 return constant_boolean_node (0, type);
8450 default:
8451 abort ();
8452 }
8453 }
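      /* E.g., for a double d, "d >= d" folds to "d == d" here, which is
	 false only when d is a NaN; for integral operands it folds
	 directly to 1.  */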
8454
8455 /* If we are comparing an expression that just has comparisons
8456 of two integer values, arithmetic expressions of those comparisons,
8457 and constants, we can simplify it. There are only three cases
8458 to check: the two values can either be equal, the first can be
8459 greater, or the second can be greater. Fold the expression for
8460 those three values. Since each value must be 0 or 1, we have
8461 eight possibilities, each of which corresponds to the constant 0
8462 or 1 or one of the six possible comparisons.
8463
8464 This handles common cases like (a > b) == 0 but also handles
8465 expressions like ((x > y) - (y > x)) > 0, which supposedly
8466 occur in macroized code. */
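      /* E.g., "((x > y) - (y > x)) > 0" evaluates to 1, 0 and 0 for the
	 greater, equal and less cases respectively, selecting mask 4
	 below, so the whole expression folds to "x > y".  */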
8467
8468 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8469 {
8470 tree cval1 = 0, cval2 = 0;
8471 int save_p = 0;
8472
8473 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8474 /* Don't handle degenerate cases here; they should already
8475 have been handled anyway. */
8476 && cval1 != 0 && cval2 != 0
8477 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8478 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8479 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8480 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8481 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8482 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8483 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8484 {
8485 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8486 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8487
8488 /* We can't just pass T to eval_subst in case cval1 or cval2
8489 was the same as ARG1. */
8490
8491 tree high_result
8492 = fold (build2 (code, type,
8493 eval_subst (arg0, cval1, maxval,
8494 cval2, minval),
8495 arg1));
8496 tree equal_result
8497 = fold (build2 (code, type,
8498 eval_subst (arg0, cval1, maxval,
8499 cval2, maxval),
8500 arg1));
8501 tree low_result
8502 = fold (build2 (code, type,
8503 eval_subst (arg0, cval1, minval,
8504 cval2, maxval),
8505 arg1));
8506
8507 /* All three of these results should be 0 or 1. Confirm they
8508 are. Then use those values to select the proper code
8509 to use. */
8510
8511 if ((integer_zerop (high_result)
8512 || integer_onep (high_result))
8513 && (integer_zerop (equal_result)
8514 || integer_onep (equal_result))
8515 && (integer_zerop (low_result)
8516 || integer_onep (low_result)))
8517 {
8518 /* Make a 3-bit mask with the high-order bit being the
8519 		     value for `>', the next for `=', and the low for `<'.  */
8520 switch ((integer_onep (high_result) * 4)
8521 + (integer_onep (equal_result) * 2)
8522 + integer_onep (low_result))
8523 {
8524 case 0:
8525 /* Always false. */
8526 return omit_one_operand (type, integer_zero_node, arg0);
8527 case 1:
8528 code = LT_EXPR;
8529 break;
8530 case 2:
8531 code = EQ_EXPR;
8532 break;
8533 case 3:
8534 code = LE_EXPR;
8535 break;
8536 case 4:
8537 code = GT_EXPR;
8538 break;
8539 case 5:
8540 code = NE_EXPR;
8541 break;
8542 case 6:
8543 code = GE_EXPR;
8544 break;
8545 case 7:
8546 /* Always true. */
8547 return omit_one_operand (type, integer_one_node, arg0);
8548 }
8549
8550 tem = build2 (code, type, cval1, cval2);
8551 if (save_p)
8552 return save_expr (tem);
8553 else
8554 return fold (tem);
8555 }
8556 }
8557 }
8558
8559 /* If this is a comparison of a field, we may be able to simplify it. */
8560 if (((TREE_CODE (arg0) == COMPONENT_REF
8561 && lang_hooks.can_use_bit_fields_p ())
8562 || TREE_CODE (arg0) == BIT_FIELD_REF)
8563 && (code == EQ_EXPR || code == NE_EXPR)
8564 /* Handle the constant case even without -O
8565 to make sure the warnings are given. */
8566 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
8567 {
8568 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
8569 if (t1)
8570 return t1;
8571 }
8572
8573 /* If this is a comparison of complex values and either or both sides
8574 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
8575 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
8576 This may prevent needless evaluations. */
8577 if ((code == EQ_EXPR || code == NE_EXPR)
8578 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
8579 && (TREE_CODE (arg0) == COMPLEX_EXPR
8580 || TREE_CODE (arg1) == COMPLEX_EXPR
8581 || TREE_CODE (arg0) == COMPLEX_CST
8582 || TREE_CODE (arg1) == COMPLEX_CST))
8583 {
8584 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
8585 tree real0, imag0, real1, imag1;
8586
8587 arg0 = save_expr (arg0);
8588 arg1 = save_expr (arg1);
8589 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
8590 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
8591 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
8592 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
8593
8594 return fold (build2 ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
8595 : TRUTH_ORIF_EXPR),
8596 type,
8597 fold (build2 (code, type, real0, real1)),
8598 fold (build2 (code, type, imag0, imag1))));
8599 }
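      /* E.g., for complex a and b, "a == b" becomes
	 "real(a) == real(b) && imag(a) == imag(b)", while "a != b" joins
	 the two comparisons with "||" instead.  */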
8600
8601 /* Optimize comparisons of strlen vs zero to a compare of the
8602 first character of the string vs zero. To wit,
8603 strlen(ptr) == 0 => *ptr == 0
8604 strlen(ptr) != 0 => *ptr != 0
8605 Other cases should reduce to one of these two (or a constant)
8606 due to the return value of strlen being unsigned. */
8607 if ((code == EQ_EXPR || code == NE_EXPR)
8608 && integer_zerop (arg1)
8609 && TREE_CODE (arg0) == CALL_EXPR)
8610 {
8611 tree fndecl = get_callee_fndecl (arg0);
8612 tree arglist;
8613
8614 if (fndecl
8615 && DECL_BUILT_IN (fndecl)
8616 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
8617 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
8618 && (arglist = TREE_OPERAND (arg0, 1))
8619 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
8620 && ! TREE_CHAIN (arglist))
8621 return fold (build2 (code, type,
8622 build1 (INDIRECT_REF, char_type_node,
8623 				  TREE_VALUE (arglist)),
8624 integer_zero_node));
8625 }
8626
8627 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8628 into a single range test. */
8629 if (TREE_CODE (arg0) == TRUNC_DIV_EXPR
8630 && TREE_CODE (arg1) == INTEGER_CST
8631 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8632 && !integer_zerop (TREE_OPERAND (arg0, 1))
8633 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8634 && !TREE_OVERFLOW (arg1))
8635 {
8636 t1 = fold_div_compare (code, type, arg0, arg1);
8637 if (t1 != NULL_TREE)
8638 return t1;
8639 }
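      /* E.g., for signed x, "x / 4 == 2" holds exactly when x is in
	 [8, 11] under truncating division, so fold_div_compare can
	 replace the division with a single range test on x.  */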
8640
8641 /* Both ARG0 and ARG1 are known to be constants at this point. */
8642 t1 = fold_relational_const (code, type, arg0, arg1);
8643 return (t1 == NULL_TREE ? t : t1);
8644
8645 case UNORDERED_EXPR:
8646 case ORDERED_EXPR:
8647 case UNLT_EXPR:
8648 case UNLE_EXPR:
8649 case UNGT_EXPR:
8650 case UNGE_EXPR:
8651 case UNEQ_EXPR:
8652 case LTGT_EXPR:
8653 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8654 {
8655 t1 = fold_relational_const (code, type, arg0, arg1);
8656 if (t1 != NULL_TREE)
8657 return t1;
8658 }
8659
8660 /* If the first operand is NaN, the result is constant. */
8661 if (TREE_CODE (arg0) == REAL_CST
8662 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
8663 && (code != LTGT_EXPR || ! flag_trapping_math))
8664 {
8665 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
8666 ? integer_zero_node
8667 : integer_one_node;
8668 return omit_one_operand (type, t1, arg1);
8669 }
8670
8671 /* If the second operand is NaN, the result is constant. */
8672 if (TREE_CODE (arg1) == REAL_CST
8673 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
8674 && (code != LTGT_EXPR || ! flag_trapping_math))
8675 {
8676 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
8677 ? integer_zero_node
8678 : integer_one_node;
8679 return omit_one_operand (type, t1, arg0);
8680 }
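      /* E.g., with a NaN constant operand, UNLT_EXPR and the other UN*
	 comparisons fold to 1, while ORDERED_EXPR and LTGT_EXPR fold to
	 0; LTGT_EXPR is only folded when it need not trap.  */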
8681
8682 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8683 {
8684 tree targ0 = strip_float_extensions (arg0);
8685 tree targ1 = strip_float_extensions (arg1);
8686 tree newtype = TREE_TYPE (targ0);
8687
8688 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8689 newtype = TREE_TYPE (targ1);
8690
8691 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8692 return fold (build2 (code, type, fold_convert (newtype, targ0),
8693 fold_convert (newtype, targ1)));
8694 }
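      /* E.g., an UNLT_EXPR of (double) f and (double) g, for floats f
	 and g, is rebuilt as the same comparison on f and g directly:
	 the widening conversions are exact, so comparing in the narrower
	 common type is equivalent.  */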
8695
8696 return t;
8697
8698 case COND_EXPR:
8699 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
8700 so all simple results must be passed through pedantic_non_lvalue. */
8701 if (TREE_CODE (arg0) == INTEGER_CST)
8702 {
8703 tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
8704 /* Only optimize constant conditions when the selected branch
8705 has the same type as the COND_EXPR. This avoids optimizing
8706 away "c ? x : throw", where the throw has a void type. */
8707 if (! VOID_TYPE_P (TREE_TYPE (tem))
8708 || VOID_TYPE_P (type))
8709 return pedantic_non_lvalue (tem);
8710 return t;
8711 }
8712 if (operand_equal_p (arg1, TREE_OPERAND (t, 2), 0))
8713 return pedantic_omit_one_operand (type, arg1, arg0);
8714
8715 /* If we have A op B ? A : C, we may be able to convert this to a
8716 simpler expression, depending on the operation and the values
8717 of B and C. Signed zeros prevent all of these transformations,
8718 for reasons given above each one.
8719
8720 Also try swapping the arguments and inverting the conditional. */
8721 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
8722 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
8723 arg1, TREE_OPERAND (arg0, 1))
8724 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
8725 {
8726 tem = fold_cond_expr_with_comparison (type, arg0,
8727 TREE_OPERAND (t, 1),
8728 TREE_OPERAND (t, 2));
8729 if (tem)
8730 return tem;
8731 }
8732
8733 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
8734 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
8735 TREE_OPERAND (t, 2),
8736 TREE_OPERAND (arg0, 1))
8737 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 2)))))
8738 {
8739 tem = invert_truthvalue (arg0);
8740 if (TREE_CODE_CLASS (TREE_CODE (tem)) == '<')
8741 {
8742 tem = fold_cond_expr_with_comparison (type, tem,
8743 TREE_OPERAND (t, 2),
8744 TREE_OPERAND (t, 1));
8745 if (tem)
8746 return tem;
8747 }
8748 }
8749
8750 /* If the second operand is simpler than the third, swap them
8751 since that produces better jump optimization results. */
8752 if (tree_swap_operands_p (TREE_OPERAND (t, 1),
8753 TREE_OPERAND (t, 2), false))
8754 {
8755 /* See if this can be inverted. If it can't, possibly because
8756 it was a floating-point inequality comparison, don't do
8757 anything. */
8758 tem = invert_truthvalue (arg0);
8759
8760 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8761 return fold (build3 (code, type, tem,
8762 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
8763 }
8764
8765 /* Convert A ? 1 : 0 to simply A. */
8766 if (integer_onep (TREE_OPERAND (t, 1))
8767 && integer_zerop (TREE_OPERAND (t, 2))
8768 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
8769 call to fold will try to move the conversion inside
8770 a COND, which will recurse. In that case, the COND_EXPR
8771 is probably the best choice, so leave it alone. */
8772 && type == TREE_TYPE (arg0))
8773 return pedantic_non_lvalue (arg0);
8774
8775 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
8776 over COND_EXPR in cases such as floating point comparisons. */
8777 if (integer_zerop (TREE_OPERAND (t, 1))
8778 && integer_onep (TREE_OPERAND (t, 2))
8779 && truth_value_p (TREE_CODE (arg0)))
8780 return pedantic_non_lvalue (fold_convert (type,
8781 invert_truthvalue (arg0)));
8782
8783 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
8784 if (TREE_CODE (arg0) == LT_EXPR
8785 && integer_zerop (TREE_OPERAND (arg0, 1))
8786 && integer_zerop (TREE_OPERAND (t, 2))
8787 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
8788 return fold_convert (type, fold (build2 (BIT_AND_EXPR,
8789 TREE_TYPE (tem), tem, arg1)));
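      /* E.g., for a 32-bit int a, "a < 0 ? INT_MIN : 0" is simply
	 "a & INT_MIN", since INT_MIN is exactly the sign bit of a.  */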
8790
8791 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
8792 already handled above. */
8793 if (TREE_CODE (arg0) == BIT_AND_EXPR
8794 && integer_onep (TREE_OPERAND (arg0, 1))
8795 && integer_zerop (TREE_OPERAND (t, 2))
8796 && integer_pow2p (arg1))
8797 {
8798 tree tem = TREE_OPERAND (arg0, 0);
8799 STRIP_NOPS (tem);
8800 if (TREE_CODE (tem) == RSHIFT_EXPR
8801 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
8802 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
8803 return fold (build2 (BIT_AND_EXPR, type,
8804 TREE_OPERAND (tem, 0), arg1));
8805 }
8806
8807 /* A & N ? N : 0 is simply A & N if N is a power of two. This
8808 is probably obsolete because the first operand should be a
8809 truth value (that's why we have the two cases above), but let's
8810 leave it in until we can confirm this for all front-ends. */
8811 if (integer_zerop (TREE_OPERAND (t, 2))
8812 && TREE_CODE (arg0) == NE_EXPR
8813 && integer_zerop (TREE_OPERAND (arg0, 1))
8814 && integer_pow2p (arg1)
8815 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
8816 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
8817 arg1, OEP_ONLY_CONST))
8818 return pedantic_non_lvalue (fold_convert (type,
8819 TREE_OPERAND (arg0, 0)));
8820
8821 /* Convert A ? B : 0 into A && B if A and B are truth values. */
8822 if (integer_zerop (TREE_OPERAND (t, 2))
8823 && truth_value_p (TREE_CODE (arg0))
8824 && truth_value_p (TREE_CODE (arg1)))
8825 return fold (build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1));
8826
8827 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
8828 if (integer_onep (TREE_OPERAND (t, 2))
8829 && truth_value_p (TREE_CODE (arg0))
8830 && truth_value_p (TREE_CODE (arg1)))
8831 {
8832 /* Only perform transformation if ARG0 is easily inverted. */
8833 tem = invert_truthvalue (arg0);
8834 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8835 return fold (build2 (TRUTH_ORIF_EXPR, type, tem, arg1));
8836 }
8837
8838 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
8839 if (integer_zerop (arg1)
8840 && truth_value_p (TREE_CODE (arg0))
8841 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
8842 {
8843 /* Only perform transformation if ARG0 is easily inverted. */
8844 tem = invert_truthvalue (arg0);
8845 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8846 return fold (build2 (TRUTH_ANDIF_EXPR, type, tem,
8847 TREE_OPERAND (t, 2)));
8848 }
8849
8850 /* Convert A ? 1 : B into A || B if A and B are truth values. */
8851 if (integer_onep (arg1)
8852 && truth_value_p (TREE_CODE (arg0))
8853 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
8854 return fold (build2 (TRUTH_ORIF_EXPR, type, arg0,
8855 TREE_OPERAND (t, 2)));
8856
8857 return t;
8858
8859 case COMPOUND_EXPR:
8860 /* When pedantic, a compound expression can be neither an lvalue
8861 nor an integer constant expression. */
8862 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
8863 return t;
8864       /* Don't let (0, 0) be a null pointer constant.  */
8865 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
8866 : fold_convert (type, arg1);
8867 return pedantic_non_lvalue (tem);
8868
8869 case COMPLEX_EXPR:
8870 if (wins)
8871 return build_complex (type, arg0, arg1);
8872 return t;
8873
8874 case REALPART_EXPR:
8875 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8876 return t;
8877 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8878 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8879 TREE_OPERAND (arg0, 1));
8880 else if (TREE_CODE (arg0) == COMPLEX_CST)
8881 return TREE_REALPART (arg0);
8882 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8883 return fold (build2 (TREE_CODE (arg0), type,
8884 fold (build1 (REALPART_EXPR, type,
8885 TREE_OPERAND (arg0, 0))),
8886 fold (build1 (REALPART_EXPR, type,
8887 TREE_OPERAND (arg0, 1)))));
8888 return t;
8889
8890 case IMAGPART_EXPR:
8891 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8892 return fold_convert (type, integer_zero_node);
8893 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8894 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8895 TREE_OPERAND (arg0, 0));
8896 else if (TREE_CODE (arg0) == COMPLEX_CST)
8897 return TREE_IMAGPART (arg0);
8898 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8899 return fold (build2 (TREE_CODE (arg0), type,
8900 fold (build1 (IMAGPART_EXPR, type,
8901 TREE_OPERAND (arg0, 0))),
8902 fold (build1 (IMAGPART_EXPR, type,
8903 TREE_OPERAND (arg0, 1)))));
8904 return t;
8905
8906 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
8907 appropriate. */
8908 case CLEANUP_POINT_EXPR:
8909 if (! has_cleanups (arg0))
8910 return TREE_OPERAND (t, 0);
8911
8912 {
8913 enum tree_code code0 = TREE_CODE (arg0);
8914 int kind0 = TREE_CODE_CLASS (code0);
8915 tree arg00 = TREE_OPERAND (arg0, 0);
8916 tree arg01;
8917
8918 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
8919 return fold (build1 (code0, type,
8920 fold (build1 (CLEANUP_POINT_EXPR,
8921 TREE_TYPE (arg00), arg00))));
8922
8923 if (kind0 == '<' || kind0 == '2'
8924 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
8925 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
8926 || code0 == TRUTH_XOR_EXPR)
8927 {
8928 arg01 = TREE_OPERAND (arg0, 1);
8929
8930 if (TREE_CONSTANT (arg00)
8931 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
8932 && ! has_cleanups (arg00)))
8933 return fold (build2 (code0, type, arg00,
8934 fold (build1 (CLEANUP_POINT_EXPR,
8935 TREE_TYPE (arg01), arg01))));
8936
8937 if (TREE_CONSTANT (arg01))
8938 return fold (build2 (code0, type,
8939 fold (build1 (CLEANUP_POINT_EXPR,
8940 TREE_TYPE (arg00), arg00)),
8941 arg01));
8942 }
8943
8944 return t;
8945 }
8946
8947 case CALL_EXPR:
8948 /* Check for a built-in function. */
8949 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
8950 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
8951 == FUNCTION_DECL)
8952 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
8953 {
8954 tree tmp = fold_builtin (t, false);
8955 if (tmp)
8956 return tmp;
8957 }
8958 return t;
8959
8960 default:
8961 return t;
8962 } /* switch (code) */
8963 }
8964
8965 #ifdef ENABLE_FOLD_CHECKING
8966 #undef fold
8967
8968 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
8969 static void fold_check_failed (tree, tree);
8970 void print_fold_checksum (tree);
8971
8972 /* When --enable-checking=fold is in effect, compute a digest of EXPR
8973    before and after the actual fold call to verify that fold did not
8974    accidentally change the original EXPR.  */
8975
8976 tree
8977 fold (tree expr)
8978 {
8979 tree ret;
8980 struct md5_ctx ctx;
8981 unsigned char checksum_before[16], checksum_after[16];
8982 htab_t ht;
8983
8984 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8985 md5_init_ctx (&ctx);
8986 fold_checksum_tree (expr, &ctx, ht);
8987 md5_finish_ctx (&ctx, checksum_before);
8988 htab_empty (ht);
8989
8990 ret = fold_1 (expr);
8991
8992 md5_init_ctx (&ctx);
8993 fold_checksum_tree (expr, &ctx, ht);
8994 md5_finish_ctx (&ctx, checksum_after);
8995 htab_delete (ht);
8996
8997 if (memcmp (checksum_before, checksum_after, 16))
8998 fold_check_failed (expr, ret);
8999
9000 return ret;
9001 }
9002
9003 void
9004 print_fold_checksum (tree expr)
9005 {
9006 struct md5_ctx ctx;
9007 unsigned char checksum[16], cnt;
9008 htab_t ht;
9009
9010 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
9011 md5_init_ctx (&ctx);
9012 fold_checksum_tree (expr, &ctx, ht);
9013 md5_finish_ctx (&ctx, checksum);
9014 htab_delete (ht);
9015 for (cnt = 0; cnt < 16; ++cnt)
9016 fprintf (stderr, "%02x", checksum[cnt]);
9017 putc ('\n', stderr);
9018 }
9019
9020 static void
9021 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
9022 {
9023 internal_error ("fold check: original tree changed by fold");
9024 }
9025
9026 static void
9027 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
9028 {
9029 void **slot;
9030 enum tree_code code;
9031 char buf[sizeof (struct tree_decl)];
9032 int i, len;
9033
9034 if (sizeof (struct tree_exp) + 5 * sizeof (tree)
9035 > sizeof (struct tree_decl)
9036 || sizeof (struct tree_type) > sizeof (struct tree_decl))
9037 abort ();
9038 if (expr == NULL)
9039 return;
9040 slot = htab_find_slot (ht, expr, INSERT);
9041 if (*slot != NULL)
9042 return;
9043 *slot = expr;
9044 code = TREE_CODE (expr);
9045 if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
9046 {
9047 /* Allow DECL_ASSEMBLER_NAME to be modified. */
9048 memcpy (buf, expr, tree_size (expr));
9049 expr = (tree) buf;
9050 SET_DECL_ASSEMBLER_NAME (expr, NULL);
9051 }
9052 else if (TREE_CODE_CLASS (code) == 't'
9053 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
9054 {
9055 /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified. */
9056 memcpy (buf, expr, tree_size (expr));
9057 expr = (tree) buf;
9058 TYPE_POINTER_TO (expr) = NULL;
9059 TYPE_REFERENCE_TO (expr) = NULL;
9060 }
9061 md5_process_bytes (expr, tree_size (expr), ctx);
9062 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
9063 if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
9064 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
9065 switch (TREE_CODE_CLASS (code))
9066 {
9067 case 'c':
9068 switch (code)
9069 {
9070 case STRING_CST:
9071 md5_process_bytes (TREE_STRING_POINTER (expr),
9072 TREE_STRING_LENGTH (expr), ctx);
9073 break;
9074 case COMPLEX_CST:
9075 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
9076 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
9077 break;
9078 case VECTOR_CST:
9079 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
9080 break;
9081 default:
9082 break;
9083 }
9084 break;
9085 case 'x':
9086 switch (code)
9087 {
9088 case TREE_LIST:
9089 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
9090 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
9091 break;
9092 case TREE_VEC:
9093 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
9094 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
9095 break;
9096 default:
9097 break;
9098 }
9099 break;
9100 case 'e':
9101 case 'r':
9102 case '<':
9103 case '1':
9104 case '2':
9105 case 's':
9106 len = first_rtl_op (code);
9107 for (i = 0; i < len; ++i)
9108 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
9109 break;
9110 case 'd':
9111 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
9112 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
9113 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
9114 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
9115 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
9116 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
9117 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
9118 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
9119 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
9120 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
9121 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
9122 break;
9123 case 't':
9124 if (TREE_CODE (expr) == ENUMERAL_TYPE)
9125 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
9126 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
9127 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
9128 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
9129 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
9130 if (INTEGRAL_TYPE_P (expr)
9131 || SCALAR_FLOAT_TYPE_P (expr))
9132 {
9133 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
9134 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
9135 }
9136 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
9137 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
9138 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
9139 break;
9140 default:
9141 break;
9142 }
9143 }
9144
9145 #endif
9146
9147 /* Perform constant folding and related simplification of initializer
9148 expression EXPR. This behaves identically to "fold" but ignores
9149 potential run-time traps and exceptions that fold must preserve. */
9150
9151 tree
9152 fold_initializer (tree expr)
9153 {
9154 int saved_signaling_nans = flag_signaling_nans;
9155 int saved_trapping_math = flag_trapping_math;
9156 int saved_trapv = flag_trapv;
9157 tree result;
9158
9159 flag_signaling_nans = 0;
9160 flag_trapping_math = 0;
9161 flag_trapv = 0;
9162
9163 result = fold (expr);
9164
9165 flag_signaling_nans = saved_signaling_nans;
9166 flag_trapping_math = saved_trapping_math;
9167 flag_trapv = saved_trapv;
9168
9169 return result;
9170 }
9171
9172 /* Determine whether the first argument is a multiple of the second argument.
9173    Return 0 if it is not, or if we cannot easily determine that it is.
9174
9175 An example of the sort of thing we care about (at this point; this routine
9176 could surely be made more general, and expanded to do what the *_DIV_EXPR's
9177 fold cases do now) is discovering that
9178
9179 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
9180
9181 is a multiple of
9182
9183 SAVE_EXPR (J * 8)
9184
9185 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
9186
9187 This code also handles discovering that
9188
9189 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
9190
9191 is a multiple of 8 so we don't have to worry about dealing with a
9192 possible remainder.
9193
9194 Note that we *look* inside a SAVE_EXPR only to determine how it was
9195 calculated; it is not safe for fold to do much of anything else with the
9196 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
9197 at run time. For example, the latter example above *cannot* be implemented
9198 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
9199 evaluation time of the original SAVE_EXPR is not necessarily the same at
9200 the time the new expression is evaluated. The only optimization of this
9201 sort that would be valid is changing
9202
9203 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
9204
9205 divided by 8 to
9206
9207 SAVE_EXPR (I) * SAVE_EXPR (J)
9208
9209 (where the same SAVE_EXPR (J) is used in the original and the
9210 transformed version). */
9211
9212 static int
9213 multiple_of_p (tree type, tree top, tree bottom)
9214 {
9215 if (operand_equal_p (top, bottom, 0))
9216 return 1;
9217
9218 if (TREE_CODE (type) != INTEGER_TYPE)
9219 return 0;
9220
9221 switch (TREE_CODE (top))
9222 {
9223 case MULT_EXPR:
9224 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
9225 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
9226
9227 case PLUS_EXPR:
9228 case MINUS_EXPR:
9229 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
9230 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
9231
9232 case LSHIFT_EXPR:
9233 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
9234 {
9235 tree op1, t1;
9236
9237 op1 = TREE_OPERAND (top, 1);
9238 /* const_binop may not detect overflow correctly,
9239 so check for it explicitly here. */
9240 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
9241 > TREE_INT_CST_LOW (op1)
9242 && TREE_INT_CST_HIGH (op1) == 0
9243 && 0 != (t1 = fold_convert (type,
9244 const_binop (LSHIFT_EXPR,
9245 size_one_node,
9246 op1, 0)))
9247 && ! TREE_OVERFLOW (t1))
9248 return multiple_of_p (type, t1, bottom);
9249 }
9250 return 0;
9251
9252 case NOP_EXPR:
9253       /* Can't handle conversions from a non-integral or wider integral type.  */
9254 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
9255 || (TYPE_PRECISION (type)
9256 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
9257 return 0;
9258
9259       /* ... fall through ...  */
9260
9261 case SAVE_EXPR:
9262 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
9263
9264 case INTEGER_CST:
9265 if (TREE_CODE (bottom) != INTEGER_CST
9266 || (TYPE_UNSIGNED (type)
9267 && (tree_int_cst_sgn (top) < 0
9268 || tree_int_cst_sgn (bottom) < 0)))
9269 return 0;
9270 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
9271 top, bottom, 0));
9272
9273 default:
9274 return 0;
9275 }
9276 }
9277
9278 /* Return true if `t' is known to be non-negative. */
9279
9280 int
9281 tree_expr_nonnegative_p (tree t)
9282 {
9283 switch (TREE_CODE (t))
9284 {
9285 case ABS_EXPR:
9286 return 1;
9287
9288 case INTEGER_CST:
9289 return tree_int_cst_sgn (t) >= 0;
9290
9291 case REAL_CST:
9292 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
9293
9294 case PLUS_EXPR:
9295 if (FLOAT_TYPE_P (TREE_TYPE (t)))
9296 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9297 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9298
9299 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
9300 both unsigned and at least 2 bits shorter than the result. */
9301 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
9302 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
9303 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
9304 {
9305 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
9306 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
9307 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
9308 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
9309 {
9310 unsigned int prec = MAX (TYPE_PRECISION (inner1),
9311 TYPE_PRECISION (inner2)) + 1;
9312 return prec < TYPE_PRECISION (TREE_TYPE (t));
9313 }
9314 }
9315 break;
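      /* E.g., for unsigned char a and b, "(int) a + (int) b" is at most
	 510, which needs only 9 of the result's 32 bits, so the sum
	 cannot be negative.  */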
9316
9317 case MULT_EXPR:
9318 if (FLOAT_TYPE_P (TREE_TYPE (t)))
9319 {
9320 /* x * x for floating point x is always non-negative. */
9321 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
9322 return 1;
9323 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9324 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9325 }
9326
9327       /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
9328 	 both unsigned and the sum of their precisions is less than the
9328 	 precision of the result.  */
9329 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
9330 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
9331 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
9332 {
9333 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
9334 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
9335 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
9336 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
9337 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
9338 < TYPE_PRECISION (TREE_TYPE (t));
9339 }
9340 return 0;
9341
9342 case TRUNC_DIV_EXPR:
9343 case CEIL_DIV_EXPR:
9344 case FLOOR_DIV_EXPR:
9345 case ROUND_DIV_EXPR:
9346 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9347 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9348
9349 case TRUNC_MOD_EXPR:
9350 case CEIL_MOD_EXPR:
9351 case FLOOR_MOD_EXPR:
9352 case ROUND_MOD_EXPR:
9353 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9354
9355 case RDIV_EXPR:
9356 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9357 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9358
9359 case BIT_AND_EXPR:
9360 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
9361 || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9362 case BIT_IOR_EXPR:
9363 case BIT_XOR_EXPR:
9364 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9365 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9366
9367 case NOP_EXPR:
9368 {
9369 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
9370 tree outer_type = TREE_TYPE (t);
9371
9372 if (TREE_CODE (outer_type) == REAL_TYPE)
9373 {
9374 if (TREE_CODE (inner_type) == REAL_TYPE)
9375 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9376 if (TREE_CODE (inner_type) == INTEGER_TYPE)
9377 {
9378 if (TYPE_UNSIGNED (inner_type))
9379 return 1;
9380 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9381 }
9382 }
9383 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
9384 {
9385 if (TREE_CODE (inner_type) == REAL_TYPE)
9386 	      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9387 if (TREE_CODE (inner_type) == INTEGER_TYPE)
9388 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
9389 && TYPE_UNSIGNED (inner_type);
9390 }
9391 }
9392 break;
9393
9394 case COND_EXPR:
9395 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
9396 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
9397 case COMPOUND_EXPR:
9398 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9399 case MIN_EXPR:
9400 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9401 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9402 case MAX_EXPR:
9403 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9404 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9405 case MODIFY_EXPR:
9406 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9407 case BIND_EXPR:
9408 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
9409 case SAVE_EXPR:
9410 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9411 case NON_LVALUE_EXPR:
9412 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9413 case FLOAT_EXPR:
9414 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9415
9416 case TARGET_EXPR:
9417 {
9418 tree temp = TARGET_EXPR_SLOT (t);
9419 t = TARGET_EXPR_INITIAL (t);
9420
9421 /* If the initializer is non-void, then it's a normal expression
9422 that will be assigned to the slot. */
9423 if (!VOID_TYPE_P (t))
9424 return tree_expr_nonnegative_p (t);
9425
9426 /* Otherwise, the initializer sets the slot in some way. One common
9427 way is an assignment statement at the end of the initializer. */
9428 while (1)
9429 {
9430 if (TREE_CODE (t) == BIND_EXPR)
9431 t = expr_last (BIND_EXPR_BODY (t));
9432 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
9433 || TREE_CODE (t) == TRY_CATCH_EXPR)
9434 t = expr_last (TREE_OPERAND (t, 0));
9435 else if (TREE_CODE (t) == STATEMENT_LIST)
9436 t = expr_last (t);
9437 else
9438 break;
9439 }
9440 if (TREE_CODE (t) == MODIFY_EXPR
9441 && TREE_OPERAND (t, 0) == temp)
9442 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9443
9444 return 0;
9445 }
9446
9447 case CALL_EXPR:
9448 {
9449 tree fndecl = get_callee_fndecl (t);
9450 tree arglist = TREE_OPERAND (t, 1);
9451 if (fndecl
9452 && DECL_BUILT_IN (fndecl)
9453 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
9454 switch (DECL_FUNCTION_CODE (fndecl))
9455 {
9456 #define CASE_BUILTIN_F(BUILT_IN_FN) \
9457 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
9458 #define CASE_BUILTIN_I(BUILT_IN_FN) \
9459 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
9460
9461 CASE_BUILTIN_F (BUILT_IN_ACOS)
9462 CASE_BUILTIN_F (BUILT_IN_ACOSH)
9463 CASE_BUILTIN_F (BUILT_IN_CABS)
9464 CASE_BUILTIN_F (BUILT_IN_COSH)
9465 CASE_BUILTIN_F (BUILT_IN_ERFC)
9466 CASE_BUILTIN_F (BUILT_IN_EXP)
9467 CASE_BUILTIN_F (BUILT_IN_EXP10)
9468 CASE_BUILTIN_F (BUILT_IN_EXP2)
9469 CASE_BUILTIN_F (BUILT_IN_FABS)
9470 CASE_BUILTIN_F (BUILT_IN_FDIM)
9471 CASE_BUILTIN_F (BUILT_IN_FREXP)
9472 CASE_BUILTIN_F (BUILT_IN_HYPOT)
9473 CASE_BUILTIN_F (BUILT_IN_POW10)
9474 CASE_BUILTIN_I (BUILT_IN_FFS)
9475 CASE_BUILTIN_I (BUILT_IN_PARITY)
9476 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
9477 /* Always true. */
9478 return 1;
9479
9480 CASE_BUILTIN_F (BUILT_IN_SQRT)
9481 /* sqrt(-0.0) is -0.0. */
9482 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
9483 return 1;
9484 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
9485
9486 CASE_BUILTIN_F (BUILT_IN_ASINH)
9487 CASE_BUILTIN_F (BUILT_IN_ATAN)
9488 CASE_BUILTIN_F (BUILT_IN_ATANH)
9489 CASE_BUILTIN_F (BUILT_IN_CBRT)
9490 CASE_BUILTIN_F (BUILT_IN_CEIL)
9491 CASE_BUILTIN_F (BUILT_IN_ERF)
9492 CASE_BUILTIN_F (BUILT_IN_EXPM1)
9493 CASE_BUILTIN_F (BUILT_IN_FLOOR)
9494 CASE_BUILTIN_F (BUILT_IN_FMOD)
9495 CASE_BUILTIN_F (BUILT_IN_LDEXP)
9496 CASE_BUILTIN_F (BUILT_IN_LLRINT)
9497 CASE_BUILTIN_F (BUILT_IN_LLROUND)
9498 CASE_BUILTIN_F (BUILT_IN_LRINT)
9499 CASE_BUILTIN_F (BUILT_IN_LROUND)
9500 CASE_BUILTIN_F (BUILT_IN_MODF)
9501 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
9502 CASE_BUILTIN_F (BUILT_IN_POW)
9503 CASE_BUILTIN_F (BUILT_IN_RINT)
9504 CASE_BUILTIN_F (BUILT_IN_ROUND)
9505 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
9506 CASE_BUILTIN_F (BUILT_IN_SINH)
9507 CASE_BUILTIN_F (BUILT_IN_TANH)
9508 CASE_BUILTIN_F (BUILT_IN_TRUNC)
9509 /* True if the 1st argument is nonnegative. */
9510 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
9511
9512 CASE_BUILTIN_F (BUILT_IN_FMAX)
9513 /* True if the 1st OR 2nd arguments are nonnegative. */
9514 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9515 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9516
9517 CASE_BUILTIN_F (BUILT_IN_FMIN)
9518 /* True if the 1st AND 2nd arguments are nonnegative. */
9519 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9520 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9521
9522 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
9523 /* True if the 2nd argument is nonnegative. */
9524 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9525
9526 default:
9527 break;
9528 #undef CASE_BUILTIN_F
9529 #undef CASE_BUILTIN_I
9530 }
9531 }
9532
9533 /* ... fall through ... */
9534
9535 default:
9536 if (truth_value_p (TREE_CODE (t)))
9537 /* Truth values evaluate to 0 or 1, which is nonnegative. */
9538 return 1;
9539 }
9540
9541   /* We don't know the sign of `t', so be conservative and return false.  */
9542 return 0;
9543 }
9544
9545 /* Return true when T is an address and is known to be nonzero.
9546 For floating point we further ensure that T is not denormal.
9547    Similar logic is present in nonzero_address in rtlanal.c.  */
9548
9549 static bool
9550 tree_expr_nonzero_p (tree t)
9551 {
9552 tree type = TREE_TYPE (t);
9553
9554 /* Doing something useful for floating point would need more work. */
9555 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9556 return false;
9557
9558 switch (TREE_CODE (t))
9559 {
9560 case ABS_EXPR:
9561 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9562 	return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
      break;
9563
9564 case INTEGER_CST:
9565 return !integer_zerop (t);
9566
9567 case PLUS_EXPR:
9568 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9569 {
9570 	  /* In the presence of negative values it is hard
9571 	     to say anything definite.  */
9572 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9573 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
9574 return false;
9575 	  /* One of the operands must be positive and the other non-negative.  */
9576 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9577 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9578 }
9579 break;
9580
9581 case MULT_EXPR:
9582 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9583 {
9584 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9585 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9586 }
9587 break;
9588
9589 case NOP_EXPR:
9590 {
9591 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
9592 tree outer_type = TREE_TYPE (t);
9593
9594 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
9595 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
9596 }
9597 break;
9598
9599 case ADDR_EXPR:
9600 /* Weak declarations may link to NULL. */
9601 if (DECL_P (TREE_OPERAND (t, 0)))
9602 return !DECL_WEAK (TREE_OPERAND (t, 0));
9603 /* Constants and all other cases are never weak. */
9604 return true;
9605
9606 case COND_EXPR:
9607 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9608 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
9609
9610 case MIN_EXPR:
9611 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9612 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9613
9614 case MAX_EXPR:
9615 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
9616 {
9617 /* When both operands are nonzero, then MAX must be too. */
9618 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
9619 return true;
9620
9621 /* MAX where operand 0 is positive is positive. */
9622 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9623 }
9624 /* MAX where operand 1 is positive is positive. */
9625 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9626 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
9627 return true;
9628 break;
9629
9630 case COMPOUND_EXPR:
9631 case MODIFY_EXPR:
9632 case BIND_EXPR:
9633 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
9634
9635 case SAVE_EXPR:
9636 case NON_LVALUE_EXPR:
9637 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9638
9639 case BIT_IOR_EXPR:
9640 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9641 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9642
9643 default:
9644 break;
9645 }
9646 return false;
9647 }
9648
9649 /* Return true if `r' is known to be non-negative.
9650 Only handles constants at the moment. */
9651
9652 int
9653 rtl_expr_nonnegative_p (rtx r)
9654 {
9655 switch (GET_CODE (r))
9656 {
9657 case CONST_INT:
9658 return INTVAL (r) >= 0;
9659
9660 case CONST_DOUBLE:
9661 if (GET_MODE (r) == VOIDmode)
9662 return CONST_DOUBLE_HIGH (r) >= 0;
9663 return 0;
9664
9665 case CONST_VECTOR:
9666 {
9667 int units, i;
9668 rtx elt;
9669
9670 units = CONST_VECTOR_NUNITS (r);
9671
9672 for (i = 0; i < units; ++i)
9673 {
9674 elt = CONST_VECTOR_ELT (r, i);
9675 if (!rtl_expr_nonnegative_p (elt))
9676 return 0;
9677 }
9678
9679 return 1;
9680 }
9681
9682 case SYMBOL_REF:
9683 case LABEL_REF:
9684 /* These are always nonnegative. */
9685 return 1;
9686
9687 default:
9688 return 0;
9689 }
9690 }
9691
9692
9693 /* See if we are applying CODE, a relational operator, to the highest
9694    or lowest possible integer of TYPE.  If so, then the result is a
9695    compile-time constant.  */
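/* E.g., if OP1 is the maximum of an 8-bit unsigned type, "x > 255"
   folds to 0 and "x <= 255" to 1, while "x >= 255" becomes "x == 255"
   and "x < 255" becomes "x != 255"; the minimum value is handled
   symmetrically below.  */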
9696
9697 static tree
9698 fold_relational_hi_lo (enum tree_code *code_p, const tree type, tree *op0_p,
9699 tree *op1_p)
9700 {
9701 tree op0 = *op0_p;
9702 tree op1 = *op1_p;
9703 enum tree_code code = *code_p;
9704 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (op1)));
9705
9706 if (TREE_CODE (op1) == INTEGER_CST
9707 && ! TREE_CONSTANT_OVERFLOW (op1)
9708 && width <= HOST_BITS_PER_WIDE_INT
9709 && (INTEGRAL_TYPE_P (TREE_TYPE (op1))
9710 || POINTER_TYPE_P (TREE_TYPE (op1))))
9711 {
9712 unsigned HOST_WIDE_INT signed_max;
9713 unsigned HOST_WIDE_INT max, min;
9714
9715 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
9716
9717 if (TYPE_UNSIGNED (TREE_TYPE (op1)))
9718 {
9719 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9720 min = 0;
9721 }
9722 else
9723 {
9724 max = signed_max;
9725 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9726 }
9727
9728 if (TREE_INT_CST_HIGH (op1) == 0
9729 && TREE_INT_CST_LOW (op1) == max)
9730 switch (code)
9731 {
9732 case GT_EXPR:
9733 return omit_one_operand (type, integer_zero_node, op0);
9734
9735 case GE_EXPR:
9736 *code_p = EQ_EXPR;
9737 break;
9738 case LE_EXPR:
9739 return omit_one_operand (type, integer_one_node, op0);
9740
9741 case LT_EXPR:
9742 *code_p = NE_EXPR;
9743 break;
9744
9745 /* The GE_EXPR and LT_EXPR cases above are not normally
9746 reached because of previous transformations. */
9747
9748 default:
9749 break;
9750 }
9751 else if (TREE_INT_CST_HIGH (op1) == 0
9752 && TREE_INT_CST_LOW (op1) == max - 1)
9753 switch (code)
9754 {
9755 case GT_EXPR:
9756 *code_p = EQ_EXPR;
9757 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
9758 break;
9759 case LE_EXPR:
9760 *code_p = NE_EXPR;
9761 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
9762 break;
9763 default:
9764 break;
9765 }
9766 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
9767 && TREE_INT_CST_LOW (op1) == min)
9768 switch (code)
9769 {
9770 case LT_EXPR:
9771 return omit_one_operand (type, integer_zero_node, op0);
9772
9773 case LE_EXPR:
9774 *code_p = EQ_EXPR;
9775 break;
9776
9777 case GE_EXPR:
9778 return omit_one_operand (type, integer_one_node, op0);
9779
9780 case GT_EXPR:
9781 *code_p = NE_EXPR;
9782 break;
9783
9784 default:
9785 break;
9786 }
9787 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
9788 && TREE_INT_CST_LOW (op1) == min + 1)
9789 switch (code)
9790 {
9791 case GE_EXPR:
9792 *code_p = NE_EXPR;
9793 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
9794 break;
9795 case LT_EXPR:
9796 *code_p = EQ_EXPR;
9797 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
9798 break;
9799 default:
9800 break;
9801 }
9802
9803 else if (TREE_INT_CST_HIGH (op1) == 0
9804 && TREE_INT_CST_LOW (op1) == signed_max
9805 && TYPE_UNSIGNED (TREE_TYPE (op1))
9806 /* signed_type does not work on pointer types. */
9807 && INTEGRAL_TYPE_P (TREE_TYPE (op1)))
9808 {
9809 	  /* The following case also applies to X < signed_max+1
9810 	     and X >= signed_max+1 because of previous transformations.  */
9811 if (code == LE_EXPR || code == GT_EXPR)
9812 {
9813 tree st0, st1, exp, retval;
9814 st0 = lang_hooks.types.signed_type (TREE_TYPE (op0));
9815 st1 = lang_hooks.types.signed_type (TREE_TYPE (op1));
9816
9817 exp = build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
9818 type,
9819 fold_convert (st0, op0),
9820 fold_convert (st1, integer_zero_node));
9821
9822 retval
9823 = nondestructive_fold_binary_to_constant (TREE_CODE (exp),
9824 TREE_TYPE (exp),
9825 TREE_OPERAND (exp, 0),
9826 TREE_OPERAND (exp, 1));
9827
9828 /* If we are in gimple form, then returning EXP would create
9829 	     non-gimple expressions.  Clearing it is safe and ensures
9830 we do not allow a non-gimple expression to escape. */
9831 if (in_gimple_form)
9832 exp = NULL;
9833
9834 return (retval ? retval : exp);
9835 }
9836 }
9837 }
9838
9839 return NULL_TREE;
9840 }
9841
9842
9843 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
9844 attempt to fold the expression to a constant without modifying TYPE,
9845 OP0 or OP1.
9846
9847 If the expression could be simplified to a constant, then return
9848 the constant. If the expression would not be simplified to a
9849 constant, then return NULL_TREE.
9850
9851 Note this is primarily designed to be called after gimplification
9852 of the tree structures and when at least one operand is a constant.
9853 As a result of those simplifying assumptions this routine is far
9854 simpler than the generic fold routine. */
9855
9856 tree
9857 nondestructive_fold_binary_to_constant (enum tree_code code, tree type,
9858 tree op0, tree op1)
9859 {
9860 int wins = 1;
9861 tree subop0;
9862 tree subop1;
9863 tree tem;
9864
9865 /* If this is a commutative operation, and ARG0 is a constant, move it
9866 to ARG1 to reduce the number of tests below. */
9867 if (commutative_tree_code (code)
9868 && (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST))
9869 {
9870 tem = op0;
9871 op0 = op1;
9872 op1 = tem;
9873 }
9874
9875   /* If either operand is a complex constant, extract its real component.  */
9876 if (TREE_CODE (op0) == COMPLEX_CST)
9877 subop0 = TREE_REALPART (op0);
9878 else
9879 subop0 = op0;
9880
9881 if (TREE_CODE (op1) == COMPLEX_CST)
9882 subop1 = TREE_REALPART (op1);
9883 else
9884 subop1 = op1;
9885
9886 /* Note if either argument is not a real or integer constant.
9887 With a few exceptions, simplification is limited to cases
9888 where both arguments are constants. */
9889 if ((TREE_CODE (subop0) != INTEGER_CST
9890 && TREE_CODE (subop0) != REAL_CST)
9891 || (TREE_CODE (subop1) != INTEGER_CST
9892 && TREE_CODE (subop1) != REAL_CST))
9893 wins = 0;
9894
9895 switch (code)
9896 {
9897 case PLUS_EXPR:
9898 /* (plus (address) (const_int)) is a constant. */
9899 if (TREE_CODE (op0) == PLUS_EXPR
9900 && TREE_CODE (op1) == INTEGER_CST
9901 && (TREE_CODE (TREE_OPERAND (op0, 0)) == ADDR_EXPR
9902 || (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
9903 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (op0, 0), 0))
9904 == ADDR_EXPR)))
9905 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
9906 {
9907 return build2 (PLUS_EXPR, type, TREE_OPERAND (op0, 0),
9908 const_binop (PLUS_EXPR, op1,
9909 TREE_OPERAND (op0, 1), 0));
9910 }
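      /* E.g., "(&obj + 4) + 8" is rewritten here as "&obj + 12" by
	 combining the two constant offsets with const_binop; otherwise
	 PLUS_EXPR falls through to the generic binary handling below.  */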
9911 case BIT_XOR_EXPR:
9912
9913 binary:
9914 if (!wins)
9915 return NULL_TREE;
9916
9917 /* Both arguments are constants. Simplify. */
9918 tem = const_binop (code, op0, op1, 0);
9919 if (tem != NULL_TREE)
9920 {
9921 /* The return value should always have the same type as
9922 the original expression. */
9923 if (TREE_TYPE (tem) != type)
9924 tem = fold_convert (type, tem);
9925
9926 return tem;
9927 }
9928 return NULL_TREE;
9929
9930 case MINUS_EXPR:
9931 /* Fold &x - &x. This can happen from &x.foo - &x.
9932 This is unsafe for certain floats even in non-IEEE formats.
9933 	 In IEEE, it is unsafe because it gives the wrong result for NaNs.
9934 Also note that operand_equal_p is always false if an
9935 operand is volatile. */
9936 if (! FLOAT_TYPE_P (type) && operand_equal_p (op0, op1, 0))
9937 return fold_convert (type, integer_zero_node);
9938
9939 goto binary;
9940
9941 case MULT_EXPR:
9942 case BIT_AND_EXPR:
9943 /* Special case multiplication or bitwise AND where one argument
9944 is zero. */
9945 if (! FLOAT_TYPE_P (type) && integer_zerop (op1))
9946 return omit_one_operand (type, op1, op0);
9947 else
9948 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (op0)))
9949 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op0)))
9950 && real_zerop (op1))
9951 return omit_one_operand (type, op1, op0);
9952
9953 goto binary;
9954
9955 case BIT_IOR_EXPR:
9956 /* Special case when we know the result will be all ones. */
9957 if (integer_all_onesp (op1))
9958 return omit_one_operand (type, op1, op0);
9959
9960 goto binary;
9961
9962 case TRUNC_DIV_EXPR:
9963 case ROUND_DIV_EXPR:
9964 case FLOOR_DIV_EXPR:
9965 case CEIL_DIV_EXPR:
9966 case EXACT_DIV_EXPR:
9967 case TRUNC_MOD_EXPR:
9968 case ROUND_MOD_EXPR:
9969 case FLOOR_MOD_EXPR:
9970 case CEIL_MOD_EXPR:
9971 case RDIV_EXPR:
9972 /* Division by zero is undefined. */
9973 if (integer_zerop (op1))
9974 return NULL_TREE;
9975
9976 if (TREE_CODE (op1) == REAL_CST
9977 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (op1)))
9978 && real_zerop (op1))
9979 return NULL_TREE;
9980
9981 goto binary;
9982
9983 case MIN_EXPR:
9984 if (INTEGRAL_TYPE_P (type)
9985 && operand_equal_p (op1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
9986 return omit_one_operand (type, op1, op0);
9987
9988 goto binary;
9989
9990 case MAX_EXPR:
9991 if (INTEGRAL_TYPE_P (type)
9992 && TYPE_MAX_VALUE (type)
9993 && operand_equal_p (op1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
9994 return omit_one_operand (type, op1, op0);
9995
9996 goto binary;
9997
9998 case RSHIFT_EXPR:
9999 /* Optimize -1 >> x for arithmetic right shifts. */
10000 if (integer_all_onesp (op0) && ! TYPE_UNSIGNED (type))
10001 return omit_one_operand (type, op0, op1);
10002 /* ... fall through ... */
10003
10004 case LSHIFT_EXPR:
10005 if (integer_zerop (op0))
10006 return omit_one_operand (type, op0, op1);
10007
10008       /* Since a negative shift count is not well-defined, don't
10009 	 try to compute the result in the compiler.  */
10010 if (TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sgn (op1) < 0)
10011 return NULL_TREE;
10012
10013 goto binary;
10014
10015 case LROTATE_EXPR:
10016 case RROTATE_EXPR:
10017 /* -1 rotated either direction by any amount is still -1. */
10018 if (integer_all_onesp (op0))
10019 return omit_one_operand (type, op0, op1);
10020
10021 /* 0 rotated either direction by any amount is still zero. */
10022 if (integer_zerop (op0))
10023 return omit_one_operand (type, op0, op1);
10024
10025 goto binary;
10026
10027 case COMPLEX_EXPR:
10028 if (wins)
10029 return build_complex (type, op0, op1);
10030 return NULL_TREE;
10031
10032 case LT_EXPR:
10033 case LE_EXPR:
10034 case GT_EXPR:
10035 case GE_EXPR:
10036 case EQ_EXPR:
10037 case NE_EXPR:
10038 /* If one arg is a real or integer constant, put it last. */
10039 if ((TREE_CODE (op0) == INTEGER_CST
10040 && TREE_CODE (op1) != INTEGER_CST)
10041 || (TREE_CODE (op0) == REAL_CST
10042 	      && TREE_CODE (op1) != REAL_CST))
10043 {
10044 tree temp;
10045
10046 temp = op0;
10047 op0 = op1;
10048 op1 = temp;
10049 code = swap_tree_comparison (code);
10050 }
10051
10052 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
10053 This transformation affects the cases which are handled in later
10054 optimizations involving comparisons with non-negative constants. */
10055 if (TREE_CODE (op1) == INTEGER_CST
10056 && TREE_CODE (op0) != INTEGER_CST
10057 && tree_int_cst_sgn (op1) > 0)
10058 {
10059 switch (code)
10060 {
10061 case GE_EXPR:
10062 code = GT_EXPR;
10063 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10064 break;
10065
10066 case LT_EXPR:
10067 code = LE_EXPR;
10068 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10069 break;
10070
10071 default:
10072 break;
10073 }
10074 }
10075
10076 tem = fold_relational_hi_lo (&code, type, &op0, &op1);
10077 if (tem)
10078 return tem;
10079
10080 /* Fall through. */
10081
10082 case ORDERED_EXPR:
10083 case UNORDERED_EXPR:
10084 case UNLT_EXPR:
10085 case UNLE_EXPR:
10086 case UNGT_EXPR:
10087 case UNGE_EXPR:
10088 case UNEQ_EXPR:
10089 case LTGT_EXPR:
10090 if (!wins)
10091 return NULL_TREE;
10092
10093 return fold_relational_const (code, type, op0, op1);
10094
10095 case RANGE_EXPR:
10096 /* This could probably be handled. */
10097 return NULL_TREE;
10098
10099 case TRUTH_AND_EXPR:
10100 /* If second arg is constant zero, result is zero, but first arg
10101 must be evaluated. */
10102 if (integer_zerop (op1))
10103 return omit_one_operand (type, op1, op0);
10104 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10105 case will be handled here. */
10106 if (integer_zerop (op0))
10107 return omit_one_operand (type, op0, op1);
10108 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10109 return constant_boolean_node (true, type);
10110 return NULL_TREE;
10111
10112 case TRUTH_OR_EXPR:
10113 /* If second arg is constant true, result is true, but we must
10114 evaluate first arg. */
10115 if (TREE_CODE (op1) == INTEGER_CST && ! integer_zerop (op1))
10116 return omit_one_operand (type, op1, op0);
10117 /* Likewise for first arg, but note this only occurs here for
10118 TRUTH_OR_EXPR. */
10119 if (TREE_CODE (op0) == INTEGER_CST && ! integer_zerop (op0))
10120 return omit_one_operand (type, op0, op1);
10121 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10122 return constant_boolean_node (false, type);
10123 return NULL_TREE;
10124
10125 case TRUTH_XOR_EXPR:
10126 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10127 {
10128 int x = ! integer_zerop (op0) ^ ! integer_zerop (op1);
10129 return constant_boolean_node (x, type);
10130 }
10131 return NULL_TREE;
10132
10133 default:
10134 return NULL_TREE;
10135 }
10136 }
10137
10138 /* Given the components of a unary expression CODE, TYPE and OP0,
10139 attempt to fold the expression to a constant without modifying
10140 TYPE or OP0.
10141
10142 If the expression could be simplified to a constant, then return
10143 the constant. If the expression would not be simplified to a
10144 constant, then return NULL_TREE.
10145
10146 Note this is primarily designed to be called after gimplification
10147 of the tree structures and when op0 is a constant. As a result
10148 of those simplifying assumptions this routine is far simpler than
10149 the generic fold routine. */
10150
10151 tree
10152 nondestructive_fold_unary_to_constant (enum tree_code code, tree type,
10153 tree op0)
10154 {
10155 /* Make sure we have a suitable constant argument. */
10156 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
10157 {
10158 tree subop;
10159
10160 if (TREE_CODE (op0) == COMPLEX_CST)
10161 subop = TREE_REALPART (op0);
10162 else
10163 subop = op0;
10164
10165 if (TREE_CODE (subop) != INTEGER_CST && TREE_CODE (subop) != REAL_CST)
10166 return NULL_TREE;
10167 }
10168
10169 switch (code)
10170 {
10171 case NOP_EXPR:
10172 case FLOAT_EXPR:
10173 case CONVERT_EXPR:
10174 case FIX_TRUNC_EXPR:
10175 case FIX_FLOOR_EXPR:
10176 case FIX_CEIL_EXPR:
10177 return fold_convert_const (code, type, op0);
10178
10179 case NEGATE_EXPR:
10180 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
10181 return fold_negate_const (op0, type);
10182 else
10183 return NULL_TREE;
10184
10185 case ABS_EXPR:
10186 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
10187 return fold_abs_const (op0, type);
10188 else
10189 return NULL_TREE;
10190
10191 case BIT_NOT_EXPR:
10192 if (TREE_CODE (op0) == INTEGER_CST)
10193 return fold_not_const (op0, type);
10194 else
10195 return NULL_TREE;
10196
10197 case REALPART_EXPR:
10198 if (TREE_CODE (op0) == COMPLEX_CST)
10199 return TREE_REALPART (op0);
10200 else
10201 return NULL_TREE;
10202
10203 case IMAGPART_EXPR:
10204 if (TREE_CODE (op0) == COMPLEX_CST)
10205 return TREE_IMAGPART (op0);
10206 else
10207 return NULL_TREE;
10208
10209 case CONJ_EXPR:
10210 if (TREE_CODE (op0) == COMPLEX_CST
10211 && TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
10212 return build_complex (type, TREE_REALPART (op0),
10213 negate_expr (TREE_IMAGPART (op0)));
10214 return NULL_TREE;
10215
10216 default:
10217 return NULL_TREE;
10218 }
10219 }
10220
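/* Illustrative sketch (not part of the original source): a typical
   use of the routine above on a constant operand.

     tree cst = build_int_2 (5, 0);
     TREE_TYPE (cst) = integer_type_node;
     tree neg = nondestructive_fold_unary_to_constant
                  (NEGATE_EXPR, integer_type_node, cst);

   NEG is an INTEGER_CST holding -5.  For a non-constant operand the
   NEGATE_EXPR case simply returns NULL_TREE.  */
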
10221 /* If EXP represents referencing an element in a constant string
10222 (either via pointer arithmetic or array indexing), return the
10223 tree representing the value accessed; otherwise return NULL. */
10224
10225 tree
10226 fold_read_from_constant_string (tree exp)
10227 {
10228 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
10229 {
10230 tree exp1 = TREE_OPERAND (exp, 0);
10231 tree index;
10232 tree string;
10233
10234 if (TREE_CODE (exp) == INDIRECT_REF)
10235 string = string_constant (exp1, &index);
10236 else
10237 {
10238 tree low_bound = array_ref_low_bound (exp);
10239 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
10240
10241 /* Optimize the special case of a zero lower bound.
10242
10243 We convert the low_bound to sizetype to avoid some problems
10244 with constant folding. (E.g. suppose the lower bound is 1,
10245 and its mode is QI. Without the conversion, (ARRAY
10246 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
10247 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
10248 if (! integer_zerop (low_bound))
10249 index = size_diffop (index, fold_convert (sizetype, low_bound));
10250
10251 string = exp1;
10252 }
10253
10254 if (string
10255 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
10256 && TREE_CODE (string) == STRING_CST
10257 && TREE_CODE (index) == INTEGER_CST
10258 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
10259 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
10260 == MODE_INT)
10261 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
10262 return fold_convert (TREE_TYPE (exp),
10263 build_int_2 ((TREE_STRING_POINTER (string)
10264 [TREE_INT_CST_LOW (index)]), 0));
10265 }
10266 return NULL;
10267 }
10268
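/* Worked example for fold_read_from_constant_string: given a tree for
   "hello"[1], the index (1) is a constant below TREE_STRING_LENGTH and
   the element mode is a one-byte integer mode, so the routine returns
   an INTEGER_CST holding 'e', converted to the type of the reference.
   A non-constant index, or one past the string length, yields NULL.  */
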
10269 /* Return the tree for neg (ARG0) when ARG0 is known to be either
10270 an integer constant or real constant.
10271
10272 TYPE is the type of the result. */
10273
10274 static tree
10275 fold_negate_const (tree arg0, tree type)
10276 {
10277 tree t = NULL_TREE;
10278
10279 if (TREE_CODE (arg0) == INTEGER_CST)
10280 {
10281 unsigned HOST_WIDE_INT low;
10282 HOST_WIDE_INT high;
10283 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
10284 TREE_INT_CST_HIGH (arg0),
10285 &low, &high);
10286 t = build_int_2 (low, high);
10287 TREE_TYPE (t) = type;
10288 TREE_OVERFLOW (t)
10289 = (TREE_OVERFLOW (arg0)
10290 | force_fit_type (t, overflow && !TYPE_UNSIGNED (type)));
10291 TREE_CONSTANT_OVERFLOW (t)
10292 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
10293 }
10294 else if (TREE_CODE (arg0) == REAL_CST)
10295 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
10296 #ifdef ENABLE_CHECKING
10297 else
10298 abort ();
10299 #endif
10300
10301 return t;
10302 }
10303
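/* Worked example for fold_negate_const: for a 32-bit signed ARG0
   holding INT_MIN, neg_double reports overflow and the value wraps
   back to INT_MIN, so TREE_OVERFLOW is set on the result because the
   type is signed.  For -5 the result is simply the INTEGER_CST 5 with
   no overflow recorded.  */
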
10304 /* Return the tree for abs (ARG0) when ARG0 is known to be either
10305 an integer constant or real constant.
10306
10307 TYPE is the type of the result. */
10308
10309 tree
10310 fold_abs_const (tree arg0, tree type)
10311 {
10312 tree t = NULL_TREE;
10313
10314 if (TREE_CODE (arg0) == INTEGER_CST)
10315 {
10316 /* If the value is unsigned, then the absolute value is
10317 the same as the ordinary value. */
10318 if (TYPE_UNSIGNED (type))
10319 return arg0;
10320 /* Similarly, if the value is non-negative. */
10321 else if (INT_CST_LT (integer_minus_one_node, arg0))
10322 return arg0;
10323 /* If the value is negative, then the absolute value is
10324 its negation. */
10325 else
10326 {
10327 unsigned HOST_WIDE_INT low;
10328 HOST_WIDE_INT high;
10329 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
10330 TREE_INT_CST_HIGH (arg0),
10331 &low, &high);
10332 t = build_int_2 (low, high);
10333 TREE_TYPE (t) = type;
10334 TREE_OVERFLOW (t)
10335 = (TREE_OVERFLOW (arg0)
10336 | force_fit_type (t, overflow));
10337 TREE_CONSTANT_OVERFLOW (t)
10338 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
10339 return t;
10340 }
10341 }
10342 else if (TREE_CODE (arg0) == REAL_CST)
10343 {
10344 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
10345 return build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
10346 else
10347 return arg0;
10348 }
10349 #ifdef ENABLE_CHECKING
10350 else
10351 abort ();
10352 #endif
10353
10354 return t;
10355 }
10356
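/* Worked example for fold_abs_const: for a signed 32-bit INT_MIN the
   negation path below is taken, neg_double reports overflow, and the
   result wraps back to INT_MIN with TREE_OVERFLOW set.  For an
   unsigned type, or for any non-negative value, the operand is
   returned unchanged.  */
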
10357 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
10358 constant. TYPE is the type of the result. */
10359
10360 static tree
10361 fold_not_const (tree arg0, tree type)
10362 {
10363 tree t = NULL_TREE;
10364
10365 if (TREE_CODE (arg0) == INTEGER_CST)
10366 {
10367 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
10368 ~ TREE_INT_CST_HIGH (arg0));
10369 TREE_TYPE (t) = type;
10370 force_fit_type (t, 0);
10371 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
10372 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
10373 }
10374 #ifdef ENABLE_CHECKING
10375 else
10376 abort ();
10377 #endif
10378
10379 return t;
10380 }
10381
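/* Worked example for fold_not_const: the complement is taken on both
   words of the internal two-word representation, and force_fit_type
   then truncates it to the precision of TYPE.  For an unsigned char
   zero this yields the INTEGER_CST 255, not an all-ones double
   word.  */
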
10382 /* Given CODE, a relational operator, the target type TYPE, and two
10383 constant operands OP0 and OP1, return the result of the
10384 relational operation. If the result is not a compile-time
10385 constant, then return NULL_TREE. */
10386
10387 static tree
10388 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
10389 {
10390 int result, invert;
10391
10392 /* From here on, the only cases we handle are when the result is
10393 known to be a constant. */
10394
10395 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
10396 {
10397 /* Handle the cases where either operand is a NaN. */
10398 if (REAL_VALUE_ISNAN (TREE_REAL_CST (op0))
10399 || REAL_VALUE_ISNAN (TREE_REAL_CST (op1)))
10400 {
10401 switch (code)
10402 {
10403 case EQ_EXPR:
10404 case ORDERED_EXPR:
10405 result = 0;
10406 break;
10407
10408 case NE_EXPR:
10409 case UNORDERED_EXPR:
10410 case UNLT_EXPR:
10411 case UNLE_EXPR:
10412 case UNGT_EXPR:
10413 case UNGE_EXPR:
10414 case UNEQ_EXPR:
10415 result = 1;
10416 break;
10417
10418 case LT_EXPR:
10419 case LE_EXPR:
10420 case GT_EXPR:
10421 case GE_EXPR:
10422 case LTGT_EXPR:
10423 if (flag_trapping_math)
10424 return NULL_TREE;
10425 result = 0;
10426 break;
10427
10428 default:
10429 abort ();
10430 }
10431
10432 return constant_boolean_node (result, type);
10433 }
10434
10435 /* From here on we're sure there are no NaNs. */
10436 switch (code)
10437 {
10438 case ORDERED_EXPR:
10439 return constant_boolean_node (true, type);
10440
10441 case UNORDERED_EXPR:
10442 return constant_boolean_node (false, type);
10443
10444 case UNLT_EXPR:
10445 code = LT_EXPR;
10446 break;
10447 case UNLE_EXPR:
10448 code = LE_EXPR;
10449 break;
10450 case UNGT_EXPR:
10451 code = GT_EXPR;
10452 break;
10453 case UNGE_EXPR:
10454 code = GE_EXPR;
10455 break;
10456 case UNEQ_EXPR:
10457 code = EQ_EXPR;
10458 break;
10459 case LTGT_EXPR:
10460 code = NE_EXPR;
10461 break;
10462
10463 default:
10464 break;
10465 }
10466 }
10467
10468 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
10469
10470 To compute GT, swap the arguments and do LT.
10471 To compute GE, do LT and invert the result.
10472 To compute LE, swap the arguments, do LT and invert the result.
10473 To compute NE, do EQ and invert the result.
10474
10475 Therefore, the code below must handle only EQ and LT. */
10476
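/* For instance, folding 2 <= 3 under this scheme: the operands are
   swapped to give 3 >= 2, which in turn is evaluated as !(3 < 2);
   3 < 2 is false, so the result is true.  */
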
10477 if (code == LE_EXPR || code == GT_EXPR)
10478 {
10479 tree tem = op0;
10480 op0 = op1;
10481 op1 = tem;
10482 code = swap_tree_comparison (code);
10483 }
10484
10485 /* Note that it is safe to invert for real values here because we
10486 have already handled the one case in which it matters. */
10487
10488 invert = 0;
10489 if (code == NE_EXPR || code == GE_EXPR)
10490 {
10491 invert = 1;
10492 code = invert_tree_comparison (code, false);
10493 }
10494
10495 /* Compute a result for LT or EQ if args permit;
10496 otherwise return NULL_TREE. */
10497 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10498 {
10499 if (code == EQ_EXPR)
10500 result = tree_int_cst_equal (op0, op1);
10501 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
10502 result = INT_CST_LT_UNSIGNED (op0, op1);
10503 else
10504 result = INT_CST_LT (op0, op1);
10505 }
10506
10507 else if (code == EQ_EXPR && !TREE_SIDE_EFFECTS (op0)
10508 && integer_zerop (op1) && tree_expr_nonzero_p (op0))
10509 result = 0;
10510
10511 /* Two real constants can be compared explicitly. */
10512 else if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
10513 {
10514 if (code == EQ_EXPR)
10515 result = REAL_VALUES_EQUAL (TREE_REAL_CST (op0),
10516 TREE_REAL_CST (op1));
10517 else
10518 result = REAL_VALUES_LESS (TREE_REAL_CST (op0),
10519 TREE_REAL_CST (op1));
10520 }
10521 else
10522 return NULL_TREE;
10523
10524 if (invert)
10525 result ^= 1;
10526 return constant_boolean_node (result, type);
10527 }
10528
10529 /* Build an expression for the address of T. Fold away INDIRECT_REF to
10530 avoid confusing the gimplify process. */
10531
10532 tree
10533 build_fold_addr_expr_with_type (tree t, tree ptrtype)
10534 {
10535 if (TREE_CODE (t) == INDIRECT_REF)
10536 {
10537 t = TREE_OPERAND (t, 0);
10538 if (TREE_TYPE (t) != ptrtype)
10539 t = build1 (NOP_EXPR, ptrtype, t);
10540 }
10541 else
10542 {
10543 tree base = t;
10544
10545 while (handled_component_p (base)
10546 || TREE_CODE (base) == REALPART_EXPR
10547 || TREE_CODE (base) == IMAGPART_EXPR)
10548 base = TREE_OPERAND (base, 0);
10549 if (DECL_P (base))
10550 TREE_ADDRESSABLE (base) = 1;
10551
10552 t = build1 (ADDR_EXPR, ptrtype, t);
10553 }
10554
10555 return t;
10556 }
10557
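/* Build an expression for the address of T, using the natural pointer
   type of T's type.  */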
10558 tree
10559 build_fold_addr_expr (tree t)
10560 {
10561 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
10562 }
10563
10564 /* Build an expression for an indirection through T, simplifying some
10565 cases. */
10566
10567 tree
10568 build_fold_indirect_ref (tree t)
10569 {
10570 tree type = TREE_TYPE (TREE_TYPE (t));
10571 tree sub = t;
10572 tree subtype;
10573
10574 STRIP_NOPS (sub);
10575 if (TREE_CODE (sub) == ADDR_EXPR)
10576 {
10577 tree op = TREE_OPERAND (sub, 0);
10578 tree optype = TREE_TYPE (op);
10579 /* *&p => p */
10580 if (lang_hooks.types_compatible_p (type, optype))
10581 return op;
10582 /* *(foo *)&fooarray => fooarray[0] */
10583 else if (TREE_CODE (optype) == ARRAY_TYPE
10584 && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
10585 return build4 (ARRAY_REF, type, op, size_zero_node, NULL_TREE, NULL_TREE);
10586 }
10587
10588 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
10589 subtype = TREE_TYPE (sub);
10590 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
10591 && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
10592 {
10593 sub = build_fold_indirect_ref (sub);
10594 return build4 (ARRAY_REF, type, sub, size_zero_node, NULL_TREE, NULL_TREE);
10595 }
10596
10597 return build1 (INDIRECT_REF, type, t);
10598 }
10599
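/* Illustrative sketch (not part of the original source): by the
   "*&p => p" rule above, build_fold_indirect_ref applied to the
   result of build_fold_addr_expr on some DECL of compatible type
   hands back the DECL itself rather than building an INDIRECT_REF.  */
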
10600 /* Strip non-trapping, non-side-effecting tree nodes from an expression
10601 whose result is ignored. The type of the returned tree need not be
10602 the same as that of the original expression. */
10603
10604 tree
10605 fold_ignored_result (tree t)
10606 {
10607 if (!TREE_SIDE_EFFECTS (t))
10608 return integer_zero_node;
10609
10610 for (;;)
10611 switch (TREE_CODE_CLASS (TREE_CODE (t)))
10612 {
10613 case '1':
10614 t = TREE_OPERAND (t, 0);
10615 break;
10616
10617 case '2':
10618 case '<':
10619 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
10620 t = TREE_OPERAND (t, 0);
10621 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
10622 t = TREE_OPERAND (t, 1);
10623 else
10624 return t;
10625 break;
10626
10627 case 'e':
10628 switch (TREE_CODE (t))
10629 {
10630 case COMPOUND_EXPR:
10631 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
10632 return t;
10633 t = TREE_OPERAND (t, 0);
10634 break;
10635
10636 case COND_EXPR:
10637 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
10638 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
10639 return t;
10640 t = TREE_OPERAND (t, 0);
10641 break;
10642
10643 default:
10644 return t;
10645 }
10646 break;
10647
10648 default:
10649 return t;
10650 }
10651 }
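
/* Worked example for fold_ignored_result, with a hypothetical
   side-effecting call F: for the ignored expression (f (), x + 1),
   the COMPOUND_EXPR case drops the side-effect-free second operand
   and the loop then stops at the CALL_EXPR, returning just f ().
   An expression with no side effects at all folds directly to
   integer_zero_node.  */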
10652
10653 #include "gt-fold-const.h"