fold-const.c (make_range): Cleanup type checking through function.
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26    @@ warn if precision et al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
29
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type.
32
33 fold takes a tree as argument and returns a simplified tree.
34
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
38
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
41
42 force_fit_type takes a constant and prior overflow indicator, and
43 forces the value to fit the type. It returns an overflow indicator. */
44
45 #include "config.h"
46 #include "system.h"
47 #include "coretypes.h"
48 #include "tm.h"
49 #include "flags.h"
50 #include "tree.h"
51 #include "real.h"
52 #include "rtl.h"
53 #include "expr.h"
54 #include "tm_p.h"
55 #include "toplev.h"
56 #include "ggc.h"
57 #include "hashtab.h"
58 #include "langhooks.h"
59 #include "md5.h"
60
61 /* The following constants represent a bit-based encoding of GCC's
62 comparison operators. This encoding simplifies transformations
63 on relational comparison operators, such as AND and OR. */
64 enum comparison_code {
65 COMPCODE_FALSE = 0,
66 COMPCODE_LT = 1,
67 COMPCODE_EQ = 2,
68 COMPCODE_LE = 3,
69 COMPCODE_GT = 4,
70 COMPCODE_LTGT = 5,
71 COMPCODE_GE = 6,
72 COMPCODE_ORD = 7,
73 COMPCODE_UNORD = 8,
74 COMPCODE_UNLT = 9,
75 COMPCODE_UNEQ = 10,
76 COMPCODE_UNLE = 11,
77 COMPCODE_UNGT = 12,
78 COMPCODE_NE = 13,
79 COMPCODE_UNGE = 14,
80 COMPCODE_TRUE = 15
81 };
82
83 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
84 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
85 static bool negate_mathfn_p (enum built_in_function);
86 static bool negate_expr_p (tree);
87 static tree negate_expr (tree);
88 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
89 static tree associate_trees (tree, tree, enum tree_code, tree);
90 static tree const_binop (enum tree_code, tree, tree, int);
91 static hashval_t size_htab_hash (const void *);
92 static int size_htab_eq (const void *, const void *);
93 static tree fold_convert_const (enum tree_code, tree, tree);
94 static enum tree_code invert_tree_comparison (enum tree_code, bool);
95 static enum comparison_code comparison_to_compcode (enum tree_code);
96 static enum tree_code compcode_to_comparison (enum comparison_code);
97 static tree combine_comparisons (enum tree_code, enum tree_code,
98 enum tree_code, tree, tree, tree);
99 static int truth_value_p (enum tree_code);
100 static int operand_equal_for_comparison_p (tree, tree, tree);
101 static int twoval_comparison_p (tree, tree *, tree *, int *);
102 static tree eval_subst (tree, tree, tree, tree, tree);
103 static tree pedantic_omit_one_operand (tree, tree, tree);
104 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
105 static tree make_bit_field_ref (tree, tree, int, int, int);
106 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
107 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
108 enum machine_mode *, int *, int *,
109 tree *, tree *);
110 static int all_ones_mask_p (tree, int);
111 static tree sign_bit_p (tree, tree);
112 static int simple_operand_p (tree);
113 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
114 static tree make_range (tree, int *, tree *, tree *);
115 static tree build_range_check (tree, tree, int, tree, tree);
116 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
117 tree);
118 static tree fold_range_test (tree);
119 static tree fold_cond_expr_with_comparison (tree, tree, tree);
120 static tree unextend (tree, int, int, tree);
121 static tree fold_truthop (enum tree_code, tree, tree, tree);
122 static tree optimize_minmax_comparison (tree);
123 static tree extract_muldiv (tree, tree, enum tree_code, tree);
124 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
125 static int multiple_of_p (tree, tree, tree);
126 static tree constant_boolean_node (int, tree);
127 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
128 tree, int);
129 static bool fold_real_zero_addition_p (tree, tree, int);
130 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
131 tree, tree, tree);
132 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
133 static tree fold_div_compare (enum tree_code, tree, tree, tree);
134 static bool reorder_operands_p (tree, tree);
135 static tree fold_negate_const (tree, tree);
136 static tree fold_not_const (tree, tree);
137 static tree fold_relational_const (enum tree_code, tree, tree, tree);
138 static tree fold_relational_hi_lo (enum tree_code *, const tree,
139 tree *, tree *);
140
141 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
142 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
143 and SUM1. Then this yields nonzero if overflow occurred during the
144 addition.
145
146 Overflow occurs if A and B have the same sign, but A and SUM differ in
147 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
148 sign. */
149 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
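/* For illustration, with 8-bit two's complement values: 0x70 + 0x20
   wraps to 0x90, i.e. two positive operands produce a negative sum.
   Then ~(a ^ b) has the sign bit set (the operand signs agree) and
   (a ^ sum) has the sign bit set (the sign changed), so their AND is
   negative and the macro reports the overflow.  */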
150 \f
151 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
152 We do that by representing the two-word integer in 4 words, with only
153 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
154 number. The value of the word is LOWPART + HIGHPART * BASE. */
155
156 #define LOWPART(x) \
157 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
158 #define HIGHPART(x) \
159 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
160 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
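/* For example, on a host where HOST_BITS_PER_WIDE_INT is 64, BASE is
   2^32, LOWPART (0x123456789ABCDEF0) is 0x9ABCDEF0 and
   HIGHPART (0x123456789ABCDEF0) is 0x12345678; the original value is
   recovered as LOWPART + HIGHPART * BASE.  */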
161
162 /* Unpack a two-word integer into 4 words.
163 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
164 WORDS points to the array of HOST_WIDE_INTs. */
165
166 static void
167 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
168 {
169 words[0] = LOWPART (low);
170 words[1] = HIGHPART (low);
171 words[2] = LOWPART (hi);
172 words[3] = HIGHPART (hi);
173 }
174
175 /* Pack an array of 4 words into a two-word integer.
176 WORDS points to the array of words.
177 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
178
179 static void
180 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
181 HOST_WIDE_INT *hi)
182 {
183 *low = words[0] + words[1] * BASE;
184 *hi = words[2] + words[3] * BASE;
185 }
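/* encode and decode are exact inverses: every word encode produces is
   less than BASE, so the multiply-and-add in decode reassembles LOW
   and HI without any carry between words.  */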
186 \f
187 /* Make the integer constant T valid for its type by setting to 0 or 1 all
188 the bits in the constant that don't belong in the type.
189
190 Return 1 if a signed overflow occurs, 0 otherwise. If OVERFLOW is
191 nonzero, a signed overflow has already occurred in calculating T, so
192 propagate it. */
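/* For example, forcing the value 0x1ff to fit a signed 8-bit type
   first masks it down to 0xff; because bit 7 (the sign bit) is then
   set, the bits above the precision are set to 1, yielding -1, and
   the function returns nonzero since the stored value changed.  */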
193
194 int
195 force_fit_type (tree t, int overflow)
196 {
197 unsigned HOST_WIDE_INT low;
198 HOST_WIDE_INT high;
199 unsigned int prec;
200
201 if (TREE_CODE (t) == REAL_CST)
202 {
203 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
204 Consider doing it via real_convert now. */
205 return overflow;
206 }
207
208 else if (TREE_CODE (t) != INTEGER_CST)
209 return overflow;
210
211 low = TREE_INT_CST_LOW (t);
212 high = TREE_INT_CST_HIGH (t);
213
214 if (POINTER_TYPE_P (TREE_TYPE (t))
215 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
216 prec = POINTER_SIZE;
217 else
218 prec = TYPE_PRECISION (TREE_TYPE (t));
219
220 /* First clear all bits that are beyond the type's precision. */
221
222 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
223 ;
224 else if (prec > HOST_BITS_PER_WIDE_INT)
225 TREE_INT_CST_HIGH (t)
226 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
227 else
228 {
229 TREE_INT_CST_HIGH (t) = 0;
230 if (prec < HOST_BITS_PER_WIDE_INT)
231 TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
232 }
233
234 /* Unsigned types do not suffer sign extension or overflow unless they
235 are a sizetype. */
236 if (TYPE_UNSIGNED (TREE_TYPE (t))
237 && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
238 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
239 return overflow;
240
241 /* If the value's sign bit is set, extend the sign. */
242 if (prec != 2 * HOST_BITS_PER_WIDE_INT
243 && (prec > HOST_BITS_PER_WIDE_INT
244 ? 0 != (TREE_INT_CST_HIGH (t)
245 & ((HOST_WIDE_INT) 1
246 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
247 : 0 != (TREE_INT_CST_LOW (t)
248 & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
249 {
250 /* Value is negative:
251 set to 1 all the bits that are outside this type's precision. */
252 if (prec > HOST_BITS_PER_WIDE_INT)
253 TREE_INT_CST_HIGH (t)
254 |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
255 else
256 {
257 TREE_INT_CST_HIGH (t) = -1;
258 if (prec < HOST_BITS_PER_WIDE_INT)
259 TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
260 }
261 }
262
263 /* Return nonzero if signed overflow occurred. */
264 return
265 ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
266 != 0);
267 }
268 \f
269 /* Add two doubleword integers with doubleword result.
270 Each argument is given as two `HOST_WIDE_INT' pieces.
271 One argument is L1 and H1; the other, L2 and H2.
272 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
273
274 int
275 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
276 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
277 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
278 {
279 unsigned HOST_WIDE_INT l;
280 HOST_WIDE_INT h;
281
282 l = l1 + l2;
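  /* Unsigned addition wraps around, so a carry out of the low word
     occurred exactly when the sum is smaller than an addend; the
     expression (l < l1) below is that carry bit.  */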
283 h = h1 + h2 + (l < l1);
284
285 *lv = l;
286 *hv = h;
287 return OVERFLOW_SUM_SIGN (h1, h2, h);
288 }
289
290 /* Negate a doubleword integer with doubleword result.
291 Return nonzero if the operation overflows, assuming it's signed.
292 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
293 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
294
295 int
296 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
297 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
298 {
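  /* Two's complement negation is -X == ~X + 1.  If the low word is
     zero, the + 1 carries into the high word, giving -H1; this
     overflows only when negating the most negative value, which the
     (*hv & h1) < 0 test detects.  Otherwise the carry is absorbed by
     the low word and the high word is simply ~H1.  */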
299 if (l1 == 0)
300 {
301 *lv = 0;
302 *hv = - h1;
303 return (*hv & h1) < 0;
304 }
305 else
306 {
307 *lv = -l1;
308 *hv = ~h1;
309 return 0;
310 }
311 }
312 \f
313 /* Multiply two doubleword integers with doubleword result.
314 Return nonzero if the operation overflows, assuming it's signed.
315 Each argument is given as two `HOST_WIDE_INT' pieces.
316 One argument is L1 and H1; the other, L2 and H2.
317 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
318
319 int
320 mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
321 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
322 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
323 {
324 HOST_WIDE_INT arg1[4];
325 HOST_WIDE_INT arg2[4];
326 HOST_WIDE_INT prod[4 * 2];
327 unsigned HOST_WIDE_INT carry;
328 int i, j, k;
329 unsigned HOST_WIDE_INT toplow, neglow;
330 HOST_WIDE_INT tophigh, neghigh;
331
332 encode (arg1, l1, h1);
333 encode (arg2, l2, h2);
334
335 memset (prod, 0, sizeof prod);
336
337 for (i = 0; i < 4; i++)
338 {
339 carry = 0;
340 for (j = 0; j < 4; j++)
341 {
342 k = i + j;
343 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
344 carry += arg1[i] * arg2[j];
345 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
346 carry += prod[k];
347 prod[k] = LOWPART (carry);
348 carry = HIGHPART (carry);
349 }
350 prod[i + 4] = carry;
351 }
352
353 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
354
355 /* Check for overflow by calculating the top half of the answer in full;
356 it should agree with the low half's sign bit. */
357 decode (prod + 4, &toplow, &tophigh);
358 if (h1 < 0)
359 {
360 neg_double (l2, h2, &neglow, &neghigh);
361 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
362 }
363 if (h2 < 0)
364 {
365 neg_double (l1, h1, &neglow, &neghigh);
366 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
367 }
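  /* The partial products above treated the operands as unsigned, so
     the two adjustments subtract the other operand from the top half
     once for each negative high word.  The signed product fits iff
     the corrected top half is all zero bits (nonnegative result) or
     all one bits (negative result), which the test below checks.  */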
368 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
369 }
370 \f
371 /* Shift the doubleword integer in L1, H1 left by COUNT places
372 keeping only PREC bits of result.
373 Shift right if COUNT is negative.
374 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
375 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
376
377 void
378 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
379 HOST_WIDE_INT count, unsigned int prec,
380 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
381 {
382 unsigned HOST_WIDE_INT signmask;
383
384 if (count < 0)
385 {
386 rshift_double (l1, h1, -count, prec, lv, hv, arith);
387 return;
388 }
389
390 if (SHIFT_COUNT_TRUNCATED)
391 count %= prec;
392
393 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
394 {
395 /* Shifting by the host word size is undefined according to the
396 ANSI standard, so we must handle this as a special case. */
397 *hv = 0;
398 *lv = 0;
399 }
400 else if (count >= HOST_BITS_PER_WIDE_INT)
401 {
402 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
403 *lv = 0;
404 }
405 else
406 {
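      /* The low-word bits moved into the high word are conceptually
	 L1 >> (HOST_BITS_PER_WIDE_INT - COUNT); the shift is done in
	 two steps so that COUNT == 0 never asks for an (undefined)
	 shift by the full word width.  */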
407 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
408 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
409 *lv = l1 << count;
410 }
411
412 /* Sign extend all bits that are beyond the precision. */
413
414 signmask = -((prec > HOST_BITS_PER_WIDE_INT
415 ? ((unsigned HOST_WIDE_INT) *hv
416 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
417 : (*lv >> (prec - 1))) & 1);
418
419 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
420 ;
421 else if (prec >= HOST_BITS_PER_WIDE_INT)
422 {
423 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
424 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
425 }
426 else
427 {
428 *hv = signmask;
429 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
430 *lv |= signmask << prec;
431 }
432 }
433
434 /* Shift the doubleword integer in L1, H1 right by COUNT places
435 keeping only PREC bits of result. COUNT must be positive.
436 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
437 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
438
439 void
440 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
441 HOST_WIDE_INT count, unsigned int prec,
442 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
443 int arith)
444 {
445 unsigned HOST_WIDE_INT signmask;
446
447 signmask = (arith
448 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
449 : 0);
450
451 if (SHIFT_COUNT_TRUNCATED)
452 count %= prec;
453
454 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
455 {
456 /* Shifting by the host word size is undefined according to the
457 ANSI standard, so we must handle this as a special case. */
458 *hv = 0;
459 *lv = 0;
460 }
461 else if (count >= HOST_BITS_PER_WIDE_INT)
462 {
463 *hv = 0;
464 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
465 }
466 else
467 {
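      /* Likewise, the high-word bits moved into the low word are
	 H1 << (HOST_BITS_PER_WIDE_INT - COUNT), done in two steps so
	 that COUNT == 0 never shifts by the full word width.  */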
468 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
469 *lv = ((l1 >> count)
470 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
471 }
472
473 /* Zero / sign extend all bits that are beyond the precision. */
474
475 if (count >= (HOST_WIDE_INT)prec)
476 {
477 *hv = signmask;
478 *lv = signmask;
479 }
480 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
481 ;
482 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
483 {
484 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
485 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
486 }
487 else
488 {
489 *hv = signmask;
490 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
491 *lv |= signmask << (prec - count);
492 }
493 }
494 \f
495 /* Rotate the doubleword integer in L1, H1 left by COUNT places
496 keeping only PREC bits of result.
497 Rotate right if COUNT is negative.
498 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
499
500 void
501 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
502 HOST_WIDE_INT count, unsigned int prec,
503 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
504 {
505 unsigned HOST_WIDE_INT s1l, s2l;
506 HOST_WIDE_INT s1h, s2h;
507
508 count %= prec;
509 if (count < 0)
510 count += prec;
511
512 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
513 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
514 *lv = s1l | s2l;
515 *hv = s1h | s2h;
516 }
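/* Both rotates are built from the two shift helpers: once COUNT has
   been reduced modulo PREC, rotating left by COUNT is
   (X << COUNT) | (X >> (PREC - COUNT)), and since lshift_double and
   rshift_double already mask their results to PREC bits, ORing the
   two pieces yields the rotated value.  */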
517
518 /* Rotate the doubleword integer in L1, H1 right by COUNT places
519 keeping only PREC bits of result. COUNT must be positive.
520 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
521
522 void
523 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
524 HOST_WIDE_INT count, unsigned int prec,
525 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
526 {
527 unsigned HOST_WIDE_INT s1l, s2l;
528 HOST_WIDE_INT s1h, s2h;
529
530 count %= prec;
531 if (count < 0)
532 count += prec;
533
534 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
535 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
536 *lv = s1l | s2l;
537 *hv = s1h | s2h;
538 }
539 \f
540 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
541 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
542 CODE is a tree code for a kind of division, one of
543 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
544 or EXACT_DIV_EXPR
545 It controls how the quotient is rounded to an integer.
546 Return nonzero if the operation overflows.
547 UNS nonzero says do unsigned division. */
548
549 int
550 div_and_round_double (enum tree_code code, int uns,
551 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
552 HOST_WIDE_INT hnum_orig,
553 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
554 HOST_WIDE_INT hden_orig,
555 unsigned HOST_WIDE_INT *lquo,
556 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
557 HOST_WIDE_INT *hrem)
558 {
559 int quo_neg = 0;
560 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
561 HOST_WIDE_INT den[4], quo[4];
562 int i, j;
563 unsigned HOST_WIDE_INT work;
564 unsigned HOST_WIDE_INT carry = 0;
565 unsigned HOST_WIDE_INT lnum = lnum_orig;
566 HOST_WIDE_INT hnum = hnum_orig;
567 unsigned HOST_WIDE_INT lden = lden_orig;
568 HOST_WIDE_INT hden = hden_orig;
569 int overflow = 0;
570
571 if (hden == 0 && lden == 0)
572 overflow = 1, lden = 1;
573
574 /* Calculate quotient sign and convert operands to unsigned. */
575 if (!uns)
576 {
577 if (hnum < 0)
578 {
579 quo_neg = ~ quo_neg;
580 /* (minimum integer) / (-1) is the only overflow case. */
581 if (neg_double (lnum, hnum, &lnum, &hnum)
582 && ((HOST_WIDE_INT) lden & hden) == -1)
583 overflow = 1;
584 }
585 if (hden < 0)
586 {
587 quo_neg = ~ quo_neg;
588 neg_double (lden, hden, &lden, &hden);
589 }
590 }
591
592 if (hnum == 0 && hden == 0)
593 { /* single precision */
594 *hquo = *hrem = 0;
595 /* This unsigned division rounds toward zero. */
596 *lquo = lnum / lden;
597 goto finish_up;
598 }
599
600 if (hnum == 0)
601 { /* trivial case: dividend < divisor */
602 /* hden != 0 already checked. */
603 *hquo = *lquo = 0;
604 *hrem = hnum;
605 *lrem = lnum;
606 goto finish_up;
607 }
608
609 memset (quo, 0, sizeof quo);
610
611   memset (num, 0, sizeof num);	/* to zero the extra element */
612 memset (den, 0, sizeof den);
613
614 encode (num, lnum, hnum);
615 encode (den, lden, hden);
616
617 /* Special code for when the divisor < BASE. */
618 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
619 {
620 /* hnum != 0 already checked. */
621 for (i = 4 - 1; i >= 0; i--)
622 {
623 work = num[i] + carry * BASE;
624 quo[i] = work / lden;
625 carry = work % lden;
626 }
627 }
628 else
629 {
630 /* Full double precision division,
631 with thanks to Don Knuth's "Seminumerical Algorithms". */
632 int num_hi_sig, den_hi_sig;
633 unsigned HOST_WIDE_INT quo_est, scale;
634
635 /* Find the highest nonzero divisor digit. */
636 for (i = 4 - 1;; i--)
637 if (den[i] != 0)
638 {
639 den_hi_sig = i;
640 break;
641 }
642
643       /* Ensure that the first digit of the divisor is at least BASE/2.
644 This is required by the quotient digit estimation algorithm. */
645
646 scale = BASE / (den[den_hi_sig] + 1);
647 if (scale > 1)
648 { /* scale divisor and dividend */
649 carry = 0;
650 for (i = 0; i <= 4 - 1; i++)
651 {
652 work = (num[i] * scale) + carry;
653 num[i] = LOWPART (work);
654 carry = HIGHPART (work);
655 }
656
657 num[4] = carry;
658 carry = 0;
659 for (i = 0; i <= 4 - 1; i++)
660 {
661 work = (den[i] * scale) + carry;
662 den[i] = LOWPART (work);
663 carry = HIGHPART (work);
664 if (den[i] != 0) den_hi_sig = i;
665 }
666 }
667
668 num_hi_sig = 4;
669
670 /* Main loop */
671 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
672 {
673 /* Guess the next quotient digit, quo_est, by dividing the first
674 two remaining dividend digits by the high order quotient digit.
675 quo_est is never low and is at most 2 high. */
676 unsigned HOST_WIDE_INT tmp;
677
678 num_hi_sig = i + den_hi_sig + 1;
679 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
680 if (num[num_hi_sig] != den[den_hi_sig])
681 quo_est = work / den[den_hi_sig];
682 else
683 quo_est = BASE - 1;
684
685 /* Refine quo_est so it's usually correct, and at most one high. */
686 tmp = work - quo_est * den[den_hi_sig];
687 if (tmp < BASE
688 && (den[den_hi_sig - 1] * quo_est
689 > (tmp * BASE + num[num_hi_sig - 2])))
690 quo_est--;
691
692 /* Try QUO_EST as the quotient digit, by multiplying the
693 divisor by QUO_EST and subtracting from the remaining dividend.
694	     Keep in mind that QUO_EST is digit I of the quotient.  */
695
696 carry = 0;
697 for (j = 0; j <= den_hi_sig; j++)
698 {
699 work = quo_est * den[j] + carry;
700 carry = HIGHPART (work);
701 work = num[i + j] - LOWPART (work);
702 num[i + j] = LOWPART (work);
703 carry += HIGHPART (work) != 0;
704 }
705
706 /* If quo_est was high by one, then num[i] went negative and
707 we need to correct things. */
708 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
709 {
710 quo_est--;
711 carry = 0; /* add divisor back in */
712 for (j = 0; j <= den_hi_sig; j++)
713 {
714 work = num[i + j] + den[j] + carry;
715 carry = HIGHPART (work);
716 num[i + j] = LOWPART (work);
717 }
718
719 num [num_hi_sig] += carry;
720 }
721
722 /* Store the quotient digit. */
723 quo[i] = quo_est;
724 }
725 }
726
727 decode (quo, lquo, hquo);
728
729 finish_up:
730 /* If result is negative, make it so. */
731 if (quo_neg)
732 neg_double (*lquo, *hquo, lquo, hquo);
733
734 /* Compute trial remainder: rem = num - (quo * den) */
735 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
736 neg_double (*lrem, *hrem, lrem, hrem);
737 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
738
739 switch (code)
740 {
741 case TRUNC_DIV_EXPR:
742 case TRUNC_MOD_EXPR: /* round toward zero */
743 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
744 return overflow;
745
746 case FLOOR_DIV_EXPR:
747 case FLOOR_MOD_EXPR: /* round toward negative infinity */
748 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
749 {
750 /* quo = quo - 1; */
751 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
752 lquo, hquo);
753 }
754 else
755 return overflow;
756 break;
757
758 case CEIL_DIV_EXPR:
759 case CEIL_MOD_EXPR: /* round toward positive infinity */
760 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
761 {
762 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
763 lquo, hquo);
764 }
765 else
766 return overflow;
767 break;
768
769 case ROUND_DIV_EXPR:
770 case ROUND_MOD_EXPR: /* round to closest integer */
771 {
772 unsigned HOST_WIDE_INT labs_rem = *lrem;
773 HOST_WIDE_INT habs_rem = *hrem;
774 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
775 HOST_WIDE_INT habs_den = hden, htwice;
776
777 /* Get absolute values. */
778 if (*hrem < 0)
779 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
780 if (hden < 0)
781 neg_double (lden, hden, &labs_den, &habs_den);
782
783	/* If (2 * abs (rem) > abs (den)), adjust the quotient away from zero.  */
784 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
785 labs_rem, habs_rem, &ltwice, &htwice);
786
787 if (((unsigned HOST_WIDE_INT) habs_den
788 < (unsigned HOST_WIDE_INT) htwice)
789 || (((unsigned HOST_WIDE_INT) habs_den
790 == (unsigned HOST_WIDE_INT) htwice)
791 && (labs_den < ltwice)))
792 {
793 if (*hquo < 0)
794 /* quo = quo - 1; */
795 add_double (*lquo, *hquo,
796 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
797 else
798 /* quo = quo + 1; */
799 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
800 lquo, hquo);
801 }
802 else
803 return overflow;
804 }
805 break;
806
807 default:
808 abort ();
809 }
810
811 /* Compute true remainder: rem = num - (quo * den) */
812 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
813 neg_double (*lrem, *hrem, lrem, hrem);
814 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
815 return overflow;
816 }
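/* As a worked example, dividing -8 by 3 produces a trial quotient of
   -2 with remainder -2.  TRUNC_DIV_EXPR keeps -2; FLOOR_DIV_EXPR
   adjusts the quotient to -3 (remainder 1); CEIL_DIV_EXPR keeps -2,
   since the ratio is negative; and ROUND_DIV_EXPR gives -3 because
   2 * |rem| = 4 exceeds |den| = 3.  */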
817 \f
818 /* Return true if built-in mathematical function specified by CODE
819    preserves the sign of its argument, i.e. -f(x) == f(-x).  */
820
821 static bool
822 negate_mathfn_p (enum built_in_function code)
823 {
824 switch (code)
825 {
826 case BUILT_IN_ASIN:
827 case BUILT_IN_ASINF:
828 case BUILT_IN_ASINL:
829 case BUILT_IN_ATAN:
830 case BUILT_IN_ATANF:
831 case BUILT_IN_ATANL:
832 case BUILT_IN_SIN:
833 case BUILT_IN_SINF:
834 case BUILT_IN_SINL:
835 case BUILT_IN_TAN:
836 case BUILT_IN_TANF:
837 case BUILT_IN_TANL:
838 return true;
839
840 default:
841 break;
842 }
843 return false;
844 }
845
846 /* Determine whether an expression T can be cheaply negated using
847 the function negate_expr. */
848
849 static bool
850 negate_expr_p (tree t)
851 {
852 unsigned HOST_WIDE_INT val;
853 unsigned int prec;
854 tree type;
855
856 if (t == 0)
857 return false;
858
859 type = TREE_TYPE (t);
860
861 STRIP_SIGN_NOPS (t);
862 switch (TREE_CODE (t))
863 {
864 case INTEGER_CST:
865 if (TYPE_UNSIGNED (type) || ! flag_trapv)
866 return true;
867
868 /* Check that -CST will not overflow type. */
869 prec = TYPE_PRECISION (type);
870 if (prec > HOST_BITS_PER_WIDE_INT)
871 {
872 if (TREE_INT_CST_LOW (t) != 0)
873 return true;
874 prec -= HOST_BITS_PER_WIDE_INT;
875 val = TREE_INT_CST_HIGH (t);
876 }
877 else
878 val = TREE_INT_CST_LOW (t);
879 if (prec < HOST_BITS_PER_WIDE_INT)
880 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
881 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
882
883 case REAL_CST:
884 case NEGATE_EXPR:
885 return true;
886
887 case COMPLEX_CST:
888 return negate_expr_p (TREE_REALPART (t))
889 && negate_expr_p (TREE_IMAGPART (t));
890
891 case PLUS_EXPR:
892 if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
893 return false;
894 /* -(A + B) -> (-B) - A. */
895 if (negate_expr_p (TREE_OPERAND (t, 1))
896 && reorder_operands_p (TREE_OPERAND (t, 0),
897 TREE_OPERAND (t, 1)))
898 return true;
899 /* -(A + B) -> (-A) - B. */
900 return negate_expr_p (TREE_OPERAND (t, 0));
901
902 case MINUS_EXPR:
903 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
904 return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
905 && reorder_operands_p (TREE_OPERAND (t, 0),
906 TREE_OPERAND (t, 1));
907
908 case MULT_EXPR:
909 if (TYPE_UNSIGNED (TREE_TYPE (t)))
910 break;
911
912 /* Fall through. */
913
914 case RDIV_EXPR:
915 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
916 return negate_expr_p (TREE_OPERAND (t, 1))
917 || negate_expr_p (TREE_OPERAND (t, 0));
918 break;
919
920 case NOP_EXPR:
921 /* Negate -((double)float) as (double)(-float). */
922 if (TREE_CODE (type) == REAL_TYPE)
923 {
924 tree tem = strip_float_extensions (t);
925 if (tem != t)
926 return negate_expr_p (tem);
927 }
928 break;
929
930 case CALL_EXPR:
931 /* Negate -f(x) as f(-x). */
932 if (negate_mathfn_p (builtin_mathfn_code (t)))
933 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
934 break;
935
936 case RSHIFT_EXPR:
937 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
938 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
939 {
940 tree op1 = TREE_OPERAND (t, 1);
941 if (TREE_INT_CST_HIGH (op1) == 0
942 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
943 == TREE_INT_CST_LOW (op1))
944 return true;
945 }
946 break;
947
948 default:
949 break;
950 }
951 return false;
952 }
953
954 /* Given T, an expression, return the negation of T. Allow for T to be
955 null, in which case return null. */
956
957 static tree
958 negate_expr (tree t)
959 {
960 tree type;
961 tree tem;
962
963 if (t == 0)
964 return 0;
965
966 type = TREE_TYPE (t);
967 STRIP_SIGN_NOPS (t);
968
969 switch (TREE_CODE (t))
970 {
971 case INTEGER_CST:
972 tem = fold_negate_const (t, type);
973 if (! TREE_OVERFLOW (tem)
974 || TYPE_UNSIGNED (type)
975 || ! flag_trapv)
976 return tem;
977 break;
978
979 case REAL_CST:
980 tem = fold_negate_const (t, type);
981 /* Two's complement FP formats, such as c4x, may overflow. */
982 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
983 return fold_convert (type, tem);
984 break;
985
986 case COMPLEX_CST:
987 {
988 tree rpart = negate_expr (TREE_REALPART (t));
989 tree ipart = negate_expr (TREE_IMAGPART (t));
990
991 if ((TREE_CODE (rpart) == REAL_CST
992 && TREE_CODE (ipart) == REAL_CST)
993 || (TREE_CODE (rpart) == INTEGER_CST
994 && TREE_CODE (ipart) == INTEGER_CST))
995 return build_complex (type, rpart, ipart);
996 }
997 break;
998
999 case NEGATE_EXPR:
1000 return fold_convert (type, TREE_OPERAND (t, 0));
1001
1002 case PLUS_EXPR:
1003 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1004 {
1005 /* -(A + B) -> (-B) - A. */
1006 if (negate_expr_p (TREE_OPERAND (t, 1))
1007 && reorder_operands_p (TREE_OPERAND (t, 0),
1008 TREE_OPERAND (t, 1)))
1009 {
1010 tem = negate_expr (TREE_OPERAND (t, 1));
1011 tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1012 tem, TREE_OPERAND (t, 0)));
1013 return fold_convert (type, tem);
1014 }
1015
1016 /* -(A + B) -> (-A) - B. */
1017 if (negate_expr_p (TREE_OPERAND (t, 0)))
1018 {
1019 tem = negate_expr (TREE_OPERAND (t, 0));
1020 tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1021 tem, TREE_OPERAND (t, 1)));
1022 return fold_convert (type, tem);
1023 }
1024 }
1025 break;
1026
1027 case MINUS_EXPR:
1028 /* - (A - B) -> B - A */
1029 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1030 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1031 return fold_convert (type,
1032 fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1033 TREE_OPERAND (t, 1),
1034 TREE_OPERAND (t, 0))));
1035 break;
1036
1037 case MULT_EXPR:
1038 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1039 break;
1040
1041 /* Fall through. */
1042
1043 case RDIV_EXPR:
1044 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1045 {
1046 tem = TREE_OPERAND (t, 1);
1047 if (negate_expr_p (tem))
1048 return fold_convert (type,
1049 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
1050 TREE_OPERAND (t, 0),
1051 negate_expr (tem))));
1052 tem = TREE_OPERAND (t, 0);
1053 if (negate_expr_p (tem))
1054 return fold_convert (type,
1055 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
1056 negate_expr (tem),
1057 TREE_OPERAND (t, 1))));
1058 }
1059 break;
1060
1061 case NOP_EXPR:
1062 /* Convert -((double)float) into (double)(-float). */
1063 if (TREE_CODE (type) == REAL_TYPE)
1064 {
1065 tem = strip_float_extensions (t);
1066 if (tem != t && negate_expr_p (tem))
1067 return fold_convert (type, negate_expr (tem));
1068 }
1069 break;
1070
1071 case CALL_EXPR:
1072 /* Negate -f(x) as f(-x). */
1073 if (negate_mathfn_p (builtin_mathfn_code (t))
1074 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1075 {
1076 tree fndecl, arg, arglist;
1077
1078 fndecl = get_callee_fndecl (t);
1079 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1080 arglist = build_tree_list (NULL_TREE, arg);
1081 return build_function_call_expr (fndecl, arglist);
1082 }
1083 break;
1084
1085 case RSHIFT_EXPR:
1086 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1087 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1088 {
1089 tree op1 = TREE_OPERAND (t, 1);
1090 if (TREE_INT_CST_HIGH (op1) == 0
1091 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1092 == TREE_INT_CST_LOW (op1))
1093 {
1094 tree ntype = TYPE_UNSIGNED (type)
1095 ? lang_hooks.types.signed_type (type)
1096 : lang_hooks.types.unsigned_type (type);
1097 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1098 temp = fold (build2 (RSHIFT_EXPR, ntype, temp, op1));
1099 return fold_convert (type, temp);
1100 }
1101 }
1102 break;
1103
1104 default:
1105 break;
1106 }
1107
1108 tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
1109 return fold_convert (type, tem);
1110 }
1111 \f
1112 /* Split a tree IN into constant, literal and variable parts that could be
1113 combined with CODE to make IN. "constant" means an expression with
1114 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1115 commutative arithmetic operation. Store the constant part into *CONP,
1116 the literal in *LITP and return the variable part. If a part isn't
1117 present, set it to null. If the tree does not decompose in this way,
1118 return the entire tree as the variable part and the other parts as null.
1119
1120 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1121 case, we negate an operand that was subtracted. Except if it is a
1122 literal for which we use *MINUS_LITP instead.
1123
1124 If NEGATE_P is true, we are negating all of IN, again except a literal
1125 for which we use *MINUS_LITP instead.
1126
1127 If IN is itself a literal or constant, return it as appropriate.
1128
1129 Note that we do not guarantee that any of the three values will be the
1130 same type as IN, but they will have the same signedness and mode. */
1131
1132 static tree
1133 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1134 tree *minus_litp, int negate_p)
1135 {
1136 tree var = 0;
1137
1138 *conp = 0;
1139 *litp = 0;
1140 *minus_litp = 0;
1141
1142 /* Strip any conversions that don't change the machine mode or signedness. */
1143 STRIP_SIGN_NOPS (in);
1144
1145 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1146 *litp = in;
1147 else if (TREE_CODE (in) == code
1148 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1149 /* We can associate addition and subtraction together (even
1150 though the C standard doesn't say so) for integers because
1151 the value is not affected. For reals, the value might be
1152 affected, so we can't. */
1153 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1154 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1155 {
1156 tree op0 = TREE_OPERAND (in, 0);
1157 tree op1 = TREE_OPERAND (in, 1);
1158 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1159 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1160
1161 /* First see if either of the operands is a literal, then a constant. */
1162 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1163 *litp = op0, op0 = 0;
1164 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1165 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1166
1167 if (op0 != 0 && TREE_CONSTANT (op0))
1168 *conp = op0, op0 = 0;
1169 else if (op1 != 0 && TREE_CONSTANT (op1))
1170 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1171
1172 /* If we haven't dealt with either operand, this is not a case we can
1173 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1174 if (op0 != 0 && op1 != 0)
1175 var = in;
1176 else if (op0 != 0)
1177 var = op0;
1178 else
1179 var = op1, neg_var_p = neg1_p;
1180
1181 /* Now do any needed negations. */
1182 if (neg_litp_p)
1183 *minus_litp = *litp, *litp = 0;
1184 if (neg_conp_p)
1185 *conp = negate_expr (*conp);
1186 if (neg_var_p)
1187 var = negate_expr (var);
1188 }
1189 else if (TREE_CONSTANT (in))
1190 *conp = in;
1191 else
1192 var = in;
1193
1194 if (negate_p)
1195 {
1196 if (*litp)
1197 *minus_litp = *litp, *litp = 0;
1198 else if (*minus_litp)
1199 *litp = *minus_litp, *minus_litp = 0;
1200 *conp = negate_expr (*conp);
1201 var = negate_expr (var);
1202 }
1203
1204 return var;
1205 }
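/* For example, splitting IN = X - 5 with CODE PLUS_EXPR finds the
   subtracted literal 5, so *MINUS_LITP is set to 5, *CONP and *LITP
   are left null, and the variable part X is returned.  */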
1206
1207 /* Re-associate trees split by the above function. T1 and T2 are either
1208 expressions to associate or null. Return the new expression, if any. If
1209 we build an operation, do it in TYPE and with CODE. */
1210
1211 static tree
1212 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1213 {
1214 if (t1 == 0)
1215 return t2;
1216 else if (t2 == 0)
1217 return t1;
1218
1219 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1220 try to fold this since we will have infinite recursion. But do
1221 deal with any NEGATE_EXPRs. */
1222 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1223 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1224 {
1225 if (code == PLUS_EXPR)
1226 {
1227 if (TREE_CODE (t1) == NEGATE_EXPR)
1228 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1229 fold_convert (type, TREE_OPERAND (t1, 0)));
1230 else if (TREE_CODE (t2) == NEGATE_EXPR)
1231 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1232 fold_convert (type, TREE_OPERAND (t2, 0)));
1233 }
1234 return build2 (code, type, fold_convert (type, t1),
1235 fold_convert (type, t2));
1236 }
1237
1238 return fold (build2 (code, type, fold_convert (type, t1),
1239 fold_convert (type, t2)));
1240 }
1241 \f
1242 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1243 to produce a new constant.
1244
1245 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1246
1247 tree
1248 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1249 {
1250 unsigned HOST_WIDE_INT int1l, int2l;
1251 HOST_WIDE_INT int1h, int2h;
1252 unsigned HOST_WIDE_INT low;
1253 HOST_WIDE_INT hi;
1254 unsigned HOST_WIDE_INT garbagel;
1255 HOST_WIDE_INT garbageh;
1256 tree t;
1257 tree type = TREE_TYPE (arg1);
1258 int uns = TYPE_UNSIGNED (type);
1259 int is_sizetype
1260 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1261 int overflow = 0;
1262 int no_overflow = 0;
1263
1264 int1l = TREE_INT_CST_LOW (arg1);
1265 int1h = TREE_INT_CST_HIGH (arg1);
1266 int2l = TREE_INT_CST_LOW (arg2);
1267 int2h = TREE_INT_CST_HIGH (arg2);
1268
1269 switch (code)
1270 {
1271 case BIT_IOR_EXPR:
1272 low = int1l | int2l, hi = int1h | int2h;
1273 break;
1274
1275 case BIT_XOR_EXPR:
1276 low = int1l ^ int2l, hi = int1h ^ int2h;
1277 break;
1278
1279 case BIT_AND_EXPR:
1280 low = int1l & int2l, hi = int1h & int2h;
1281 break;
1282
1283 case RSHIFT_EXPR:
1284 int2l = -int2l;
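      /* ... fall through ... */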
1285 case LSHIFT_EXPR:
1286 /* It's unclear from the C standard whether shifts can overflow.
1287 The following code ignores overflow; perhaps a C standard
1288 interpretation ruling is needed. */
1289 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1290 &low, &hi, !uns);
1291 no_overflow = 1;
1292 break;
1293
1294 case RROTATE_EXPR:
1295 int2l = - int2l;
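      /* ... fall through ... */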
1296 case LROTATE_EXPR:
1297 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1298 &low, &hi);
1299 break;
1300
1301 case PLUS_EXPR:
1302 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1303 break;
1304
1305 case MINUS_EXPR:
1306 neg_double (int2l, int2h, &low, &hi);
1307 add_double (int1l, int1h, low, hi, &low, &hi);
1308 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1309 break;
1310
1311 case MULT_EXPR:
1312 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1313 break;
1314
1315 case TRUNC_DIV_EXPR:
1316 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1317 case EXACT_DIV_EXPR:
1318 /* This is a shortcut for a common special case. */
1319 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1320 && ! TREE_CONSTANT_OVERFLOW (arg1)
1321 && ! TREE_CONSTANT_OVERFLOW (arg2)
1322 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1323 {
1324 if (code == CEIL_DIV_EXPR)
1325 int1l += int2l - 1;
1326
1327 low = int1l / int2l, hi = 0;
1328 break;
1329 }
1330
1331 /* ... fall through ... */
1332
1333 case ROUND_DIV_EXPR:
1334 if (int2h == 0 && int2l == 1)
1335 {
1336 low = int1l, hi = int1h;
1337 break;
1338 }
1339 if (int1l == int2l && int1h == int2h
1340 && ! (int1l == 0 && int1h == 0))
1341 {
1342 low = 1, hi = 0;
1343 break;
1344 }
1345 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1346 &low, &hi, &garbagel, &garbageh);
1347 break;
1348
1349 case TRUNC_MOD_EXPR:
1350 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1351 /* This is a shortcut for a common special case. */
1352 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1353 && ! TREE_CONSTANT_OVERFLOW (arg1)
1354 && ! TREE_CONSTANT_OVERFLOW (arg2)
1355 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1356 {
1357 if (code == CEIL_MOD_EXPR)
1358 int1l += int2l - 1;
1359 low = int1l % int2l, hi = 0;
1360 break;
1361 }
1362
1363 /* ... fall through ... */
1364
1365 case ROUND_MOD_EXPR:
1366 overflow = div_and_round_double (code, uns,
1367 int1l, int1h, int2l, int2h,
1368 &garbagel, &garbageh, &low, &hi);
1369 break;
1370
1371 case MIN_EXPR:
1372 case MAX_EXPR:
1373 if (uns)
1374 low = (((unsigned HOST_WIDE_INT) int1h
1375 < (unsigned HOST_WIDE_INT) int2h)
1376 || (((unsigned HOST_WIDE_INT) int1h
1377 == (unsigned HOST_WIDE_INT) int2h)
1378 && int1l < int2l));
1379 else
1380 low = (int1h < int2h
1381 || (int1h == int2h && int1l < int2l));
1382
1383 if (low == (code == MIN_EXPR))
1384 low = int1l, hi = int1h;
1385 else
1386 low = int2l, hi = int2h;
1387 break;
1388
1389 default:
1390 abort ();
1391 }
1392
1393   /* If this is for a sizetype, the result can be represented as one (signed)
1394      HOST_WIDE_INT word, and doesn't overflow, use size_int_type_wide since
1395      it caches constants.  */
1396 if (is_sizetype
1397 && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
1398 || (hi == -1 && (HOST_WIDE_INT) low < 0))
1399 && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
1400 return size_int_type_wide (low, type);
1401 else
1402 {
1403 t = build_int_2 (low, hi);
1404 TREE_TYPE (t) = TREE_TYPE (arg1);
1405 }
1406
1407 TREE_OVERFLOW (t)
1408 = ((notrunc
1409 ? (!uns || is_sizetype) && overflow
1410 : (force_fit_type (t, (!uns || is_sizetype) && overflow)
1411 && ! no_overflow))
1412 | TREE_OVERFLOW (arg1)
1413 | TREE_OVERFLOW (arg2));
1414
1415 /* If we're doing a size calculation, unsigned arithmetic does overflow.
1416 So check if force_fit_type truncated the value. */
1417 if (is_sizetype
1418 && ! TREE_OVERFLOW (t)
1419 && (TREE_INT_CST_HIGH (t) != hi
1420 || TREE_INT_CST_LOW (t) != low))
1421 TREE_OVERFLOW (t) = 1;
1422
1423 TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
1424 | TREE_CONSTANT_OVERFLOW (arg1)
1425 | TREE_CONSTANT_OVERFLOW (arg2));
1426 return t;
1427 }
1428
1429 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1430 constant. We assume ARG1 and ARG2 have the same data type, or at least
1431 are the same kind of constant and the same machine mode.
1432
1433 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1434
1435 static tree
1436 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1437 {
1438 STRIP_NOPS (arg1);
1439 STRIP_NOPS (arg2);
1440
1441 if (TREE_CODE (arg1) == INTEGER_CST)
1442 return int_const_binop (code, arg1, arg2, notrunc);
1443
1444 if (TREE_CODE (arg1) == REAL_CST)
1445 {
1446 enum machine_mode mode;
1447 REAL_VALUE_TYPE d1;
1448 REAL_VALUE_TYPE d2;
1449 REAL_VALUE_TYPE value;
1450 tree t, type;
1451
1452 d1 = TREE_REAL_CST (arg1);
1453 d2 = TREE_REAL_CST (arg2);
1454
1455 type = TREE_TYPE (arg1);
1456 mode = TYPE_MODE (type);
1457
1458 /* Don't perform operation if we honor signaling NaNs and
1459 either operand is a NaN. */
1460 if (HONOR_SNANS (mode)
1461 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1462 return NULL_TREE;
1463
1464 /* Don't perform operation if it would raise a division
1465 by zero exception. */
1466 if (code == RDIV_EXPR
1467 && REAL_VALUES_EQUAL (d2, dconst0)
1468 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1469 return NULL_TREE;
1470
1471 /* If either operand is a NaN, just return it. Otherwise, set up
1472 for floating-point trap; we return an overflow. */
1473 if (REAL_VALUE_ISNAN (d1))
1474 return arg1;
1475 else if (REAL_VALUE_ISNAN (d2))
1476 return arg2;
1477
1478 REAL_ARITHMETIC (value, code, d1, d2);
1479
1480 t = build_real (type, real_value_truncate (mode, value));
1481
1482 TREE_OVERFLOW (t)
1483 = (force_fit_type (t, 0)
1484 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1485 TREE_CONSTANT_OVERFLOW (t)
1486 = TREE_OVERFLOW (t)
1487 | TREE_CONSTANT_OVERFLOW (arg1)
1488 | TREE_CONSTANT_OVERFLOW (arg2);
1489 return t;
1490 }
1491 if (TREE_CODE (arg1) == COMPLEX_CST)
1492 {
1493 tree type = TREE_TYPE (arg1);
1494 tree r1 = TREE_REALPART (arg1);
1495 tree i1 = TREE_IMAGPART (arg1);
1496 tree r2 = TREE_REALPART (arg2);
1497 tree i2 = TREE_IMAGPART (arg2);
1498 tree t;
1499
1500 switch (code)
1501 {
1502 case PLUS_EXPR:
1503 t = build_complex (type,
1504 const_binop (PLUS_EXPR, r1, r2, notrunc),
1505 const_binop (PLUS_EXPR, i1, i2, notrunc));
1506 break;
1507
1508 case MINUS_EXPR:
1509 t = build_complex (type,
1510 const_binop (MINUS_EXPR, r1, r2, notrunc),
1511 const_binop (MINUS_EXPR, i1, i2, notrunc));
1512 break;
1513
1514 case MULT_EXPR:
1515 t = build_complex (type,
1516 const_binop (MINUS_EXPR,
1517 const_binop (MULT_EXPR,
1518 r1, r2, notrunc),
1519 const_binop (MULT_EXPR,
1520 i1, i2, notrunc),
1521 notrunc),
1522 const_binop (PLUS_EXPR,
1523 const_binop (MULT_EXPR,
1524 r1, i2, notrunc),
1525 const_binop (MULT_EXPR,
1526 i1, r2, notrunc),
1527 notrunc));
1528 break;
1529
1530 case RDIV_EXPR:
1531 {
1532 tree magsquared
1533 = const_binop (PLUS_EXPR,
1534 const_binop (MULT_EXPR, r2, r2, notrunc),
1535 const_binop (MULT_EXPR, i2, i2, notrunc),
1536 notrunc);
1537
1538 t = build_complex (type,
1539 const_binop
1540 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1541 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1542 const_binop (PLUS_EXPR,
1543 const_binop (MULT_EXPR, r1, r2,
1544 notrunc),
1545 const_binop (MULT_EXPR, i1, i2,
1546 notrunc),
1547 notrunc),
1548 magsquared, notrunc),
1549 const_binop
1550 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1551 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1552 const_binop (MINUS_EXPR,
1553 const_binop (MULT_EXPR, i1, r2,
1554 notrunc),
1555 const_binop (MULT_EXPR, r1, i2,
1556 notrunc),
1557 notrunc),
1558 magsquared, notrunc));
1559 }
1560 break;
1561
1562 default:
1563 abort ();
1564 }
1565 return t;
1566 }
1567 return 0;
1568 }
1569
1570 /* These are the hash table functions for the hash table of INTEGER_CST
1571 nodes of a sizetype. */
1572
1573 /* Return the hash code of X, an INTEGER_CST.  */
1574
1575 static hashval_t
1576 size_htab_hash (const void *x)
1577 {
1578 tree t = (tree) x;
1579
1580 return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
1581 ^ htab_hash_pointer (TREE_TYPE (t))
1582 ^ (TREE_OVERFLOW (t) << 20));
1583 }
1584
1585 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1586    is the same as that given by *Y, also an INTEGER_CST tree node.  */
1587
1588 static int
1589 size_htab_eq (const void *x, const void *y)
1590 {
1591 tree xt = (tree) x;
1592 tree yt = (tree) y;
1593
1594 return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
1595 && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
1596 && TREE_TYPE (xt) == TREE_TYPE (yt)
1597 && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
1598 }
1599 \f
1600 /* Return an INTEGER_CST whose low-order HOST_BITS_PER_WIDE_INT bits are
1601    given by NUMBER, with the sizetype represented by KIND.  */
1602
1603 tree
1604 size_int_wide (HOST_WIDE_INT number, enum size_type_kind kind)
1605 {
1606 return size_int_type_wide (number, sizetype_tab[(int) kind]);
1607 }
1608
1609 /* Likewise, but the desired type is specified explicitly. */
1610
1611 static GTY (()) tree new_const;
1612 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
1613 htab_t size_htab;
1614
1615 tree
1616 size_int_type_wide (HOST_WIDE_INT number, tree type)
1617 {
1618 void **slot;
1619
1620 if (size_htab == 0)
1621 {
1622 size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
1623 new_const = make_node (INTEGER_CST);
1624 }
1625
1626 /* Adjust NEW_CONST to be the constant we want. If it's already in the
1627 hash table, we return the value from the hash table. Otherwise, we
1628 place that in the hash table and make a new node for the next time. */
1629 TREE_INT_CST_LOW (new_const) = number;
1630 TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
1631 TREE_TYPE (new_const) = type;
1632 TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
1633 = force_fit_type (new_const, 0);
1634
1635 slot = htab_find_slot (size_htab, new_const, INSERT);
1636 if (*slot == 0)
1637 {
1638 tree t = new_const;
1639
1640 *slot = new_const;
1641 new_const = make_node (INTEGER_CST);
1642 return t;
1643 }
1644 else
1645 return (tree) *slot;
1646 }
1647
1648 /* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
1649    is a tree code.  The type of the result is taken from the operands.
1650    Both must be the same integer type and it must be a sizetype.
1651 If the operands are constant, so is the result. */
1652
1653 tree
1654 size_binop (enum tree_code code, tree arg0, tree arg1)
1655 {
1656 tree type = TREE_TYPE (arg0);
1657
1658 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1659 || type != TREE_TYPE (arg1))
1660 abort ();
1661
1662 /* Handle the special case of two integer constants faster. */
1663 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1664 {
1665 /* And some specific cases even faster than that. */
1666 if (code == PLUS_EXPR && integer_zerop (arg0))
1667 return arg1;
1668 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1669 && integer_zerop (arg1))
1670 return arg0;
1671 else if (code == MULT_EXPR && integer_onep (arg0))
1672 return arg1;
1673
1674 /* Handle general case of two integer constants. */
1675 return int_const_binop (code, arg0, arg1, 0);
1676 }
1677
1678 if (arg0 == error_mark_node || arg1 == error_mark_node)
1679 return error_mark_node;
1680
1681 return fold (build2 (code, type, arg0, arg1));
1682 }
1683
1684 /* Given two values, either both of sizetype or both of bitsizetype,
1685 compute the difference between the two values. Return the value
1686    in the signed type corresponding to the type of the operands.  */
1687
1688 tree
1689 size_diffop (tree arg0, tree arg1)
1690 {
1691 tree type = TREE_TYPE (arg0);
1692 tree ctype;
1693
1694 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1695 || type != TREE_TYPE (arg1))
1696 abort ();
1697
1698 /* If the type is already signed, just do the simple thing. */
1699 if (!TYPE_UNSIGNED (type))
1700 return size_binop (MINUS_EXPR, arg0, arg1);
1701
1702 ctype = (type == bitsizetype || type == ubitsizetype
1703 ? sbitsizetype : ssizetype);
1704
1705 /* If either operand is not a constant, do the conversions to the signed
1706 type and subtract. The hardware will do the right thing with any
1707 overflow in the subtraction. */
1708 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1709 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1710 fold_convert (ctype, arg1));
1711
1712 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1713 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1714 overflow) and negate (which can't either). Special-case a result
1715 of zero while we're here. */
1716 if (tree_int_cst_equal (arg0, arg1))
1717 return fold_convert (ctype, integer_zero_node);
1718 else if (tree_int_cst_lt (arg1, arg0))
1719 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1720 else
1721 return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
1722 fold_convert (ctype, size_binop (MINUS_EXPR,
1723 arg1, arg0)));
1724 }
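/* For example, given the sizetype operands 2 and 5, the difference is
   computed as -(5 - 2) and returned as -3 in the corresponding signed
   type, rather than as the huge unsigned value that 2 - 5 would wrap
   to in sizetype itself.  */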
1725 \f
1726
1727 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1728 type TYPE. If no simplification can be done return NULL_TREE. */
1729
1730 static tree
1731 fold_convert_const (enum tree_code code, tree type, tree arg1)
1732 {
1733 int overflow = 0;
1734 tree t;
1735
1736 if (TREE_TYPE (arg1) == type)
1737 return arg1;
1738
1739 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1740 {
1741 if (TREE_CODE (arg1) == INTEGER_CST)
1742 {
1743 /* If we would build a constant wider than GCC supports,
1744 leave the conversion unfolded. */
1745 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
1746 return NULL_TREE;
1747
1748 /* If we are trying to make a sizetype for a small integer, use
1749	     size_int_type_wide to pick up cached constants and reduce duplicate nodes.  */
1750 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1751 && !TREE_CONSTANT_OVERFLOW (arg1)
1752 && compare_tree_int (arg1, 10000) < 0)
1753 return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);
1754
1755 /* Given an integer constant, make new constant with new type,
1756 appropriately sign-extended or truncated. */
1757 t = build_int_2 (TREE_INT_CST_LOW (arg1),
1758 TREE_INT_CST_HIGH (arg1));
1759 TREE_TYPE (t) = type;
1760 /* Indicate an overflow if (1) ARG1 already overflowed,
1761 or (2) force_fit_type indicates an overflow.
1762 Tell force_fit_type that an overflow has already occurred
1763 if ARG1 is a too-large unsigned value and T is signed.
1764 But don't indicate an overflow if converting a pointer. */
1765 TREE_OVERFLOW (t)
1766 = ((force_fit_type (t,
1767 (TREE_INT_CST_HIGH (arg1) < 0
1768 && (TYPE_UNSIGNED (type)
1769 < TYPE_UNSIGNED (TREE_TYPE (arg1)))))
1770 && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
1771 || TREE_OVERFLOW (arg1));
1772 TREE_CONSTANT_OVERFLOW (t)
1773 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1774 return t;
1775 }
1776 else if (TREE_CODE (arg1) == REAL_CST)
1777 {
1778 /* The following code implements the floating point to integer
1779 conversion rules required by the Java Language Specification,
1780 that IEEE NaNs are mapped to zero and values that overflow
1781 the target precision saturate, i.e. values greater than
1782 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1783 are mapped to INT_MIN. These semantics are allowed by the
1784 C and C++ standards that simply state that the behavior of
1785 FP-to-integer conversion is unspecified upon overflow. */
1786
1787 HOST_WIDE_INT high, low;
1788
1789 REAL_VALUE_TYPE r;
1790 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1791
1792 switch (code)
1793 {
1794 case FIX_TRUNC_EXPR:
1795 real_trunc (&r, VOIDmode, &x);
1796 break;
1797
1798 case FIX_CEIL_EXPR:
1799 real_ceil (&r, VOIDmode, &x);
1800 break;
1801
1802 case FIX_FLOOR_EXPR:
1803 real_floor (&r, VOIDmode, &x);
1804 break;
1805
1806 case FIX_ROUND_EXPR:
1807 real_round (&r, VOIDmode, &x);
1808 break;
1809
1810 default:
1811 abort ();
1812 }
1813
1814 /* If R is NaN, return zero and show we have an overflow. */
1815 if (REAL_VALUE_ISNAN (r))
1816 {
1817 overflow = 1;
1818 high = 0;
1819 low = 0;
1820 }
1821
1822 /* See if R is less than the lower bound or greater than the
1823 upper bound. */
1824
1825 if (! overflow)
1826 {
1827 tree lt = TYPE_MIN_VALUE (type);
1828 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1829 if (REAL_VALUES_LESS (r, l))
1830 {
1831 overflow = 1;
1832 high = TREE_INT_CST_HIGH (lt);
1833 low = TREE_INT_CST_LOW (lt);
1834 }
1835 }
1836
1837 if (! overflow)
1838 {
1839 tree ut = TYPE_MAX_VALUE (type);
1840 if (ut)
1841 {
1842 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1843 if (REAL_VALUES_LESS (u, r))
1844 {
1845 overflow = 1;
1846 high = TREE_INT_CST_HIGH (ut);
1847 low = TREE_INT_CST_LOW (ut);
1848 }
1849 }
1850 }
1851
1852 if (! overflow)
1853 REAL_VALUE_TO_INT (&low, &high, r);
1854
1855 t = build_int_2 (low, high);
1856 TREE_TYPE (t) = type;
1857 TREE_OVERFLOW (t)
1858 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1859 TREE_CONSTANT_OVERFLOW (t)
1860 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1861 return t;
1862 }
1863 }
1864 else if (TREE_CODE (type) == REAL_TYPE)
1865 {
1866 if (TREE_CODE (arg1) == INTEGER_CST)
1867 return build_real_from_int_cst (type, arg1);
1868 if (TREE_CODE (arg1) == REAL_CST)
1869 {
1870 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1871 {
1872 /* We make a copy of ARG1 so that we don't modify an
1873 existing constant tree. */
1874 t = copy_node (arg1);
1875 TREE_TYPE (t) = type;
1876 return t;
1877 }
1878
1879 t = build_real (type,
1880 real_value_truncate (TYPE_MODE (type),
1881 TREE_REAL_CST (arg1)));
1882
1883 TREE_OVERFLOW (t)
1884 = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
1885 TREE_CONSTANT_OVERFLOW (t)
1886 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1887 return t;
1888 }
1889 }
1890 return NULL_TREE;
1891 }
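
/* A worked example of the saturation rules above (illustrative,
   assuming a target with 32-bit int): folding (int) 1.0e10 via
   FIX_TRUNC_EXPR yields INT_MAX (2147483647) with TREE_OVERFLOW
   set, folding (int) -1.0e10 yields INT_MIN, and folding a NaN
   yields 0, also with TREE_OVERFLOW set.  */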
1892
1893 /* Convert expression ARG to type TYPE. Used by the middle-end for
1894 simple conversions in preference to calling the front-end's convert. */
1895
1896 tree
1897 fold_convert (tree type, tree arg)
1898 {
1899 tree orig = TREE_TYPE (arg);
1900 tree tem;
1901
1902 if (type == orig)
1903 return arg;
1904
1905 if (TREE_CODE (arg) == ERROR_MARK
1906 || TREE_CODE (type) == ERROR_MARK
1907 || TREE_CODE (orig) == ERROR_MARK)
1908 return error_mark_node;
1909
1910 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1911 return fold (build1 (NOP_EXPR, type, arg));
1912
1913 if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type)
1914 || TREE_CODE (type) == OFFSET_TYPE)
1915 {
1916 if (TREE_CODE (arg) == INTEGER_CST)
1917 {
1918 tem = fold_convert_const (NOP_EXPR, type, arg);
1919 if (tem != NULL_TREE)
1920 return tem;
1921 }
1922 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1923 || TREE_CODE (orig) == OFFSET_TYPE)
1924 return fold (build1 (NOP_EXPR, type, arg));
1925 if (TREE_CODE (orig) == COMPLEX_TYPE)
1926 {
1927 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1928 return fold_convert (type, tem);
1929 }
1930 if (TREE_CODE (orig) == VECTOR_TYPE
1931 && GET_MODE_SIZE (TYPE_MODE (type))
1932 == GET_MODE_SIZE (TYPE_MODE (orig)))
1933 return fold (build1 (NOP_EXPR, type, arg));
1934 }
1935 else if (TREE_CODE (type) == REAL_TYPE)
1936 {
1937 if (TREE_CODE (arg) == INTEGER_CST)
1938 {
1939 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1940 if (tem != NULL_TREE)
1941 return tem;
1942 }
1943 else if (TREE_CODE (arg) == REAL_CST)
1944 {
1945 tem = fold_convert_const (NOP_EXPR, type, arg);
1946 if (tem != NULL_TREE)
1947 return tem;
1948 }
1949
1950 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1951 return fold (build1 (FLOAT_EXPR, type, arg));
1952 if (TREE_CODE (orig) == REAL_TYPE)
1953 return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1954 type, arg));
1955 if (TREE_CODE (orig) == COMPLEX_TYPE)
1956 {
1957 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1958 return fold_convert (type, tem);
1959 }
1960 }
1961 else if (TREE_CODE (type) == COMPLEX_TYPE)
1962 {
1963 if (INTEGRAL_TYPE_P (orig)
1964 || POINTER_TYPE_P (orig)
1965 || TREE_CODE (orig) == REAL_TYPE)
1966 return build2 (COMPLEX_EXPR, type,
1967 fold_convert (TREE_TYPE (type), arg),
1968 fold_convert (TREE_TYPE (type), integer_zero_node));
1969 if (TREE_CODE (orig) == COMPLEX_TYPE)
1970 {
1971 tree rpart, ipart;
1972
1973 if (TREE_CODE (arg) == COMPLEX_EXPR)
1974 {
1975 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
1976 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
1977 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
1978 }
1979
1980 arg = save_expr (arg);
1981 rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1982 ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
1983 rpart = fold_convert (TREE_TYPE (type), rpart);
1984 ipart = fold_convert (TREE_TYPE (type), ipart);
1985 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
1986 }
1987 }
1988 else if (TREE_CODE (type) == VECTOR_TYPE)
1989 {
1990 if ((INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1991 && GET_MODE_SIZE (TYPE_MODE (type))
1992 == GET_MODE_SIZE (TYPE_MODE (orig)))
1993 return fold (build1 (NOP_EXPR, type, arg));
1994 if (TREE_CODE (orig) == VECTOR_TYPE
1995 && GET_MODE_SIZE (TYPE_MODE (type))
1996 == GET_MODE_SIZE (TYPE_MODE (orig)))
1997 return fold (build1 (NOP_EXPR, type, arg));
1998 }
1999 else if (VOID_TYPE_P (type))
2000 return fold (build1 (CONVERT_EXPR, type, arg));
2001 abort ();
2002 }
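
/* Examples of the dispatch above (illustrative): an INTEGER_CST
   converted to a REAL_TYPE goes through fold_convert_const with
   FLOAT_EXPR; a COMPLEX_TYPE value converted to a scalar type
   folds to a conversion of its REALPART_EXPR; and any value
   converts to a VOID_TYPE via a plain CONVERT_EXPR.  */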
2003 \f
2004 /* Return an expr equal to X but certainly not valid as an lvalue. */
2005
2006 tree
2007 non_lvalue (tree x)
2008 {
2009 /* We only need to wrap lvalue tree codes. */
2010 switch (TREE_CODE (x))
2011 {
2012 case VAR_DECL:
2013 case PARM_DECL:
2014 case RESULT_DECL:
2015 case LABEL_DECL:
2016 case FUNCTION_DECL:
2017 case SSA_NAME:
2018
2019 case COMPONENT_REF:
2020 case INDIRECT_REF:
2021 case ARRAY_REF:
2022 case ARRAY_RANGE_REF:
2023 case BIT_FIELD_REF:
2024 case BUFFER_REF:
2025 case OBJ_TYPE_REF:
2026
2027 case REALPART_EXPR:
2028 case IMAGPART_EXPR:
2029 case PREINCREMENT_EXPR:
2030 case PREDECREMENT_EXPR:
2031 case SAVE_EXPR:
2032 case UNSAVE_EXPR:
2033 case TRY_CATCH_EXPR:
2034 case WITH_CLEANUP_EXPR:
2035 case COMPOUND_EXPR:
2036 case MODIFY_EXPR:
2037 case TARGET_EXPR:
2038 case COND_EXPR:
2039 case BIND_EXPR:
2040 case MIN_EXPR:
2041 case MAX_EXPR:
2042 case RTL_EXPR:
2043 break;
2044
2045 default:
2046 /* Assume the worst for front-end tree codes. */
2047 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2048 break;
2049 return x;
2050 }
2051 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2052 }
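
/* For example, non_lvalue applied to a VAR_DECL or an ARRAY_REF
   wraps it in NON_LVALUE_EXPR, while an arithmetic node such as a
   PLUS_EXPR is returned unchanged, since it is not an lvalue to
   begin with.  */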
2053
2054 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2055 Zero means allow extended lvalues. */
2056
2057 int pedantic_lvalues;
2058
2059 /* When pedantic, return an expr equal to X but certainly not valid as a
2060 pedantic lvalue. Otherwise, return X. */
2061
2062 tree
2063 pedantic_non_lvalue (tree x)
2064 {
2065 if (pedantic_lvalues)
2066 return non_lvalue (x);
2067 else
2068 return x;
2069 }
2070 \f
2071 /* Given a tree comparison code, return the code that is the logical inverse
2072 of the given code. It is not safe to do this for floating-point
2073 comparisons, except for NE_EXPR and EQ_EXPR, so we receive HONOR_NANS as
2074 well: if reversing the comparison is unsafe, return ERROR_MARK. */
2075
2076 static enum tree_code
2077 invert_tree_comparison (enum tree_code code, bool honor_nans)
2078 {
2079 if (honor_nans && flag_trapping_math)
2080 return ERROR_MARK;
2081
2082 switch (code)
2083 {
2084 case EQ_EXPR:
2085 return NE_EXPR;
2086 case NE_EXPR:
2087 return EQ_EXPR;
2088 case GT_EXPR:
2089 return honor_nans ? UNLE_EXPR : LE_EXPR;
2090 case GE_EXPR:
2091 return honor_nans ? UNLT_EXPR : LT_EXPR;
2092 case LT_EXPR:
2093 return honor_nans ? UNGE_EXPR : GE_EXPR;
2094 case LE_EXPR:
2095 return honor_nans ? UNGT_EXPR : GT_EXPR;
2096 case LTGT_EXPR:
2097 return UNEQ_EXPR;
2098 case UNEQ_EXPR:
2099 return LTGT_EXPR;
2100 case UNGT_EXPR:
2101 return LE_EXPR;
2102 case UNGE_EXPR:
2103 return LT_EXPR;
2104 case UNLT_EXPR:
2105 return GE_EXPR;
2106 case UNLE_EXPR:
2107 return GT_EXPR;
2108 case ORDERED_EXPR:
2109 return UNORDERED_EXPR;
2110 case UNORDERED_EXPR:
2111 return ORDERED_EXPR;
2112 default:
2113 abort ();
2114 }
2115 }
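
/* For example, with NaNs honored the inverse of LT_EXPR is
   UNGE_EXPR, since !(x < y) holds when x >= y or when the operands
   are unordered; without NaNs it is simply GE_EXPR.  When NaNs are
   honored and -ftrapping-math is in effect we return ERROR_MARK,
   since exchanging signaling and quiet comparisons could change
   where traps occur.  */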
2116
2117 /* Similar, but return the comparison that results if the operands are
2118 swapped. This is safe for floating-point. */
2119
2120 enum tree_code
2121 swap_tree_comparison (enum tree_code code)
2122 {
2123 switch (code)
2124 {
2125 case EQ_EXPR:
2126 case NE_EXPR:
2127 return code;
2128 case GT_EXPR:
2129 return LT_EXPR;
2130 case GE_EXPR:
2131 return LE_EXPR;
2132 case LT_EXPR:
2133 return GT_EXPR;
2134 case LE_EXPR:
2135 return GE_EXPR;
2136 default:
2137 abort ();
2138 }
2139 }
2140
2141
2142 /* Convert a comparison tree code from an enum tree_code representation
2143 into a compcode bit-based encoding. This function is the inverse of
2144 compcode_to_comparison. */
2145
2146 static enum comparison_code
2147 comparison_to_compcode (enum tree_code code)
2148 {
2149 switch (code)
2150 {
2151 case LT_EXPR:
2152 return COMPCODE_LT;
2153 case EQ_EXPR:
2154 return COMPCODE_EQ;
2155 case LE_EXPR:
2156 return COMPCODE_LE;
2157 case GT_EXPR:
2158 return COMPCODE_GT;
2159 case NE_EXPR:
2160 return COMPCODE_NE;
2161 case GE_EXPR:
2162 return COMPCODE_GE;
2163 case ORDERED_EXPR:
2164 return COMPCODE_ORD;
2165 case UNORDERED_EXPR:
2166 return COMPCODE_UNORD;
2167 case UNLT_EXPR:
2168 return COMPCODE_UNLT;
2169 case UNEQ_EXPR:
2170 return COMPCODE_UNEQ;
2171 case UNLE_EXPR:
2172 return COMPCODE_UNLE;
2173 case UNGT_EXPR:
2174 return COMPCODE_UNGT;
2175 case LTGT_EXPR:
2176 return COMPCODE_LTGT;
2177 case UNGE_EXPR:
2178 return COMPCODE_UNGE;
2179 default:
2180 abort ();
2181 }
2182 }
2183
2184 /* Convert a compcode bit-based encoding of a comparison operator back
2185 to GCC's enum tree_code representation. This function is the
2186 inverse of comparison_to_compcode. */
2187
2188 static enum tree_code
2189 compcode_to_comparison (enum comparison_code code)
2190 {
2191 switch (code)
2192 {
2193 case COMPCODE_LT:
2194 return LT_EXPR;
2195 case COMPCODE_EQ:
2196 return EQ_EXPR;
2197 case COMPCODE_LE:
2198 return LE_EXPR;
2199 case COMPCODE_GT:
2200 return GT_EXPR;
2201 case COMPCODE_NE:
2202 return NE_EXPR;
2203 case COMPCODE_GE:
2204 return GE_EXPR;
2205 case COMPCODE_ORD:
2206 return ORDERED_EXPR;
2207 case COMPCODE_UNORD:
2208 return UNORDERED_EXPR;
2209 case COMPCODE_UNLT:
2210 return UNLT_EXPR;
2211 case COMPCODE_UNEQ:
2212 return UNEQ_EXPR;
2213 case COMPCODE_UNLE:
2214 return UNLE_EXPR;
2215 case COMPCODE_UNGT:
2216 return UNGT_EXPR;
2217 case COMPCODE_LTGT:
2218 return LTGT_EXPR;
2219 case COMPCODE_UNGE:
2220 return UNGE_EXPR;
2221 default:
2222 abort ();
2223 }
2224 }
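
/* In this encoding, bit 0 of a compcode means "less than", bit 1
   "equal", bit 2 "greater than" and bit 3 "unordered", so for
   example COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ) == 3 and
   COMPCODE_NE == (COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD)
   == 13.  This is what lets combine_comparisons below implement
   the AND and OR of two comparisons as bitwise AND and OR of
   their compcodes.  */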
2225
2226 /* Return a tree for the comparison which is the combination of
2227 doing the AND or OR (depending on CODE) of the two operations LCODE
2228 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2229 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2230 if this makes the transformation invalid. */
2231
2232 tree
2233 combine_comparisons (enum tree_code code, enum tree_code lcode,
2234 enum tree_code rcode, tree truth_type,
2235 tree ll_arg, tree lr_arg)
2236 {
2237 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2238 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2239 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2240 enum comparison_code compcode;
2241
2242 switch (code)
2243 {
2244 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2245 compcode = lcompcode & rcompcode;
2246 break;
2247
2248 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2249 compcode = lcompcode | rcompcode;
2250 break;
2251
2252 default:
2253 return NULL_TREE;
2254 }
2255
2256 if (!honor_nans)
2257 {
2258 /* Eliminate unordered comparisons, as well as LTGT and ORD
2259 which are not used unless the mode has NaNs. */
2260 compcode &= ~COMPCODE_UNORD;
2261 if (compcode == COMPCODE_LTGT)
2262 compcode = COMPCODE_NE;
2263 else if (compcode == COMPCODE_ORD)
2264 compcode = COMPCODE_TRUE;
2265 }
2266 else if (flag_trapping_math)
2267 {
2268 /* Check that the original operation and the optimized ones will trap
2269 under the same condition. */
2270 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2271 && (lcompcode != COMPCODE_EQ)
2272 && (lcompcode != COMPCODE_ORD);
2273 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2274 && (rcompcode != COMPCODE_EQ)
2275 && (rcompcode != COMPCODE_ORD);
2276 bool trap = (compcode & COMPCODE_UNORD) == 0
2277 && (compcode != COMPCODE_EQ)
2278 && (compcode != COMPCODE_ORD);
2279
2280 /* In a short-circuited boolean expression the LHS might be
2281 such that the RHS, if evaluated, will never trap. For
2282 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2283 if neither x nor y is NaN. (This is a mixed blessing: for
2284 example, the expression above will never trap, hence
2285 optimizing it to x < y would be invalid). */
2286 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2287 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2288 rtrap = false;
2289
2290 /* If the comparison was short-circuited, and only the RHS
2291 trapped, we may now generate a spurious trap. */
2292 if (rtrap && !ltrap
2293 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2294 return NULL_TREE;
2295
2296 /* If we changed the conditions that cause a trap, we lose. */
2297 if ((ltrap || rtrap) != trap)
2298 return NULL_TREE;
2299 }
2300
2301 if (compcode == COMPCODE_TRUE)
2302 return constant_boolean_node (true, truth_type);
2303 else if (compcode == COMPCODE_FALSE)
2304 return constant_boolean_node (false, truth_type);
2305 else
2306 return fold (build2 (compcode_to_comparison (compcode),
2307 truth_type, ll_arg, lr_arg));
2308 }
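
/* For example, combining (x < y) || (x == y) on identical operands
   gives COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE and hence folds
   to x <= y.  Without NaNs, (x < y) || (x > y) yields
   COMPCODE_LTGT, which is canonicalized to COMPCODE_NE above and
   folds to x != y.  */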
2309
2310 /* Return nonzero if CODE is a tree code that represents a truth value. */
2311
2312 static int
2313 truth_value_p (enum tree_code code)
2314 {
2315 return (TREE_CODE_CLASS (code) == '<'
2316 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2317 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2318 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2319 }
2320 \f
2321 /* Return nonzero if two operands (typically of the same tree node)
2322 are necessarily equal. If either argument has side-effects this
2323 function returns zero. FLAGS modifies behavior as follows:
2324
2325 If OEP_ONLY_CONST is set, only return nonzero for constants.
2326 This function tests whether the operands are indistinguishable;
2327 it does not test whether they are equal using C's == operation.
2328 The distinction is important for IEEE floating point, because
2329 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2330 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2331
2332 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2333 even though it may hold multiple values during a function.
2334 This is because a GCC tree node guarantees that nothing else is
2335 executed between the evaluation of its "operands" (which may often
2336 be evaluated in arbitrary order). Hence if the operands themselves
2337 have no side effects, the VAR_DECLs, PARM_DECLs, etc., must hold the
2338 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2339 unset means assuming isochronic (or instantaneous) tree equivalence.
2340 Unless comparing arbitrary expression trees, such as from different
2341 statements, this flag can usually be left unset.
2342
2343 If OEP_PURE_SAME is set, then pure functions with identical arguments
2344 are considered the same. It is used when the caller has other ways
2345 to ensure that global memory is unchanged in between. */
2346
2347 int
2348 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2349 {
2350 /* If either is ERROR_MARK, they aren't equal. */
2351 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2352 return 0;
2353
2354 /* If both types don't have the same signedness, then we can't consider
2355 them equal. We must check this before the STRIP_NOPS calls
2356 because they may change the signedness of the arguments. */
2357 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2358 return 0;
2359
2360 STRIP_NOPS (arg0);
2361 STRIP_NOPS (arg1);
2362
2363 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2364 /* This is needed for conversions and for COMPONENT_REF.
2365 Might as well play it safe and always test this. */
2366 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2367 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2368 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2369 return 0;
2370
2371 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2372 We don't care about side effects in that case because the SAVE_EXPR
2373 takes care of that for us. In all other cases, two expressions are
2374 equal if they have no side effects. If we have two identical
2375 expressions with side effects that should be treated the same due
2376 to the only side effects being identical SAVE_EXPR's, that will
2377 be detected in the recursive calls below. */
2378 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2379 && (TREE_CODE (arg0) == SAVE_EXPR
2380 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2381 return 1;
2382
2383 /* Next handle constant cases, those for which we can return 1 even
2384 if ONLY_CONST is set. */
2385 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2386 switch (TREE_CODE (arg0))
2387 {
2388 case INTEGER_CST:
2389 return (! TREE_CONSTANT_OVERFLOW (arg0)
2390 && ! TREE_CONSTANT_OVERFLOW (arg1)
2391 && tree_int_cst_equal (arg0, arg1));
2392
2393 case REAL_CST:
2394 return (! TREE_CONSTANT_OVERFLOW (arg0)
2395 && ! TREE_CONSTANT_OVERFLOW (arg1)
2396 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2397 TREE_REAL_CST (arg1)));
2398
2399 case VECTOR_CST:
2400 {
2401 tree v1, v2;
2402
2403 if (TREE_CONSTANT_OVERFLOW (arg0)
2404 || TREE_CONSTANT_OVERFLOW (arg1))
2405 return 0;
2406
2407 v1 = TREE_VECTOR_CST_ELTS (arg0);
2408 v2 = TREE_VECTOR_CST_ELTS (arg1);
2409 while (v1 && v2)
2410 {
2411 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2412 flags))
2413 return 0;
2414 v1 = TREE_CHAIN (v1);
2415 v2 = TREE_CHAIN (v2);
2416 }
2417
2418 return 1;
2419 }
2420
2421 case COMPLEX_CST:
2422 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2423 flags)
2424 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2425 flags));
2426
2427 case STRING_CST:
2428 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2429 && ! memcmp (TREE_STRING_POINTER (arg0),
2430 TREE_STRING_POINTER (arg1),
2431 TREE_STRING_LENGTH (arg0)));
2432
2433 case ADDR_EXPR:
2434 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2435 0);
2436 default:
2437 break;
2438 }
2439
2440 if (flags & OEP_ONLY_CONST)
2441 return 0;
2442
2443 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2444 {
2445 case '1':
2446 /* Two conversions are equal only if signedness and modes match. */
2447 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
2448 && (TYPE_UNSIGNED (TREE_TYPE (arg0))
2449 != TYPE_UNSIGNED (TREE_TYPE (arg1))))
2450 return 0;
2451
2452 return operand_equal_p (TREE_OPERAND (arg0, 0),
2453 TREE_OPERAND (arg1, 0), flags);
2454
2455 case '<':
2456 case '2':
2457 if (operand_equal_p (TREE_OPERAND (arg0, 0),
2458 TREE_OPERAND (arg1, 0), flags)
2459 && operand_equal_p (TREE_OPERAND (arg0, 1),
2460 TREE_OPERAND (arg1, 1), flags))
2461 return 1;
2462
2463 /* For commutative ops, allow the other order. */
2464 return (commutative_tree_code (TREE_CODE (arg0))
2465 && operand_equal_p (TREE_OPERAND (arg0, 0),
2466 TREE_OPERAND (arg1, 1), flags)
2467 && operand_equal_p (TREE_OPERAND (arg0, 1),
2468 TREE_OPERAND (arg1, 0), flags));
2469
2470 case 'r':
2471 /* If either of the pointer (or reference) expressions we are
2472 dereferencing contain a side effect, these cannot be equal. */
2473 if (TREE_SIDE_EFFECTS (arg0)
2474 || TREE_SIDE_EFFECTS (arg1))
2475 return 0;
2476
2477 switch (TREE_CODE (arg0))
2478 {
2479 case INDIRECT_REF:
2480 return operand_equal_p (TREE_OPERAND (arg0, 0),
2481 TREE_OPERAND (arg1, 0), flags);
2482
2483 case COMPONENT_REF:
2484 case ARRAY_REF:
2485 case ARRAY_RANGE_REF:
2486 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2487 TREE_OPERAND (arg1, 0), flags)
2488 && operand_equal_p (TREE_OPERAND (arg0, 1),
2489 TREE_OPERAND (arg1, 1), flags));
2490
2491 case BIT_FIELD_REF:
2492 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2493 TREE_OPERAND (arg1, 0), flags)
2494 && operand_equal_p (TREE_OPERAND (arg0, 1),
2495 TREE_OPERAND (arg1, 1), flags)
2496 && operand_equal_p (TREE_OPERAND (arg0, 2),
2497 TREE_OPERAND (arg1, 2), flags));
2498 default:
2499 return 0;
2500 }
2501
2502 case 'e':
2503 switch (TREE_CODE (arg0))
2504 {
2505 case ADDR_EXPR:
2506 case TRUTH_NOT_EXPR:
2507 return operand_equal_p (TREE_OPERAND (arg0, 0),
2508 TREE_OPERAND (arg1, 0), flags);
2509
2510 case TRUTH_ANDIF_EXPR:
2511 case TRUTH_ORIF_EXPR:
2512 return operand_equal_p (TREE_OPERAND (arg0, 0),
2513 TREE_OPERAND (arg1, 0), flags)
2514 && operand_equal_p (TREE_OPERAND (arg0, 1),
2515 TREE_OPERAND (arg1, 1), flags);
2516
2517 case TRUTH_AND_EXPR:
2518 case TRUTH_OR_EXPR:
2519 case TRUTH_XOR_EXPR:
2520 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2521 TREE_OPERAND (arg1, 0), flags)
2522 && operand_equal_p (TREE_OPERAND (arg0, 1),
2523 TREE_OPERAND (arg1, 1), flags))
2524 || (operand_equal_p (TREE_OPERAND (arg0, 0),
2525 TREE_OPERAND (arg1, 1), flags)
2526 && operand_equal_p (TREE_OPERAND (arg0, 1),
2527 TREE_OPERAND (arg1, 0), flags));
2528
2529 case RTL_EXPR:
2530 return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
2531
2532 case CALL_EXPR:
2533 /* If the CALL_EXPRs call different functions, then they
2534 clearly cannot be equal. */
2535 if (! operand_equal_p (TREE_OPERAND (arg0, 0),
2536 TREE_OPERAND (arg1, 0), flags))
2537 return 0;
2538
2539 {
2540 unsigned int cef = call_expr_flags (arg0);
2541 if (flags & OEP_PURE_SAME)
2542 cef &= ECF_CONST | ECF_PURE;
2543 else
2544 cef &= ECF_CONST;
2545 if (!cef)
2546 return 0;
2547 }
2548
2549 /* Now see if all the arguments are the same. operand_equal_p
2550 does not handle TREE_LIST, so we walk the operands here
2551 feeding them to operand_equal_p. */
2552 arg0 = TREE_OPERAND (arg0, 1);
2553 arg1 = TREE_OPERAND (arg1, 1);
2554 while (arg0 && arg1)
2555 {
2556 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2557 flags))
2558 return 0;
2559
2560 arg0 = TREE_CHAIN (arg0);
2561 arg1 = TREE_CHAIN (arg1);
2562 }
2563
2564 /* If we get here and both argument lists are exhausted
2565 then the CALL_EXPRs are equal. */
2566 return ! (arg0 || arg1);
2567
2568 default:
2569 return 0;
2570 }
2571
2572 case 'd':
2573 /* Consider __builtin_sqrt equal to sqrt. */
2574 return (TREE_CODE (arg0) == FUNCTION_DECL
2575 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2576 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2577 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2578
2579 default:
2580 return 0;
2581 }
2582 }
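
/* For example, two distinct INTEGER_CST nodes for 42 of the same
   type compare equal here, while the REAL_CSTs -0.0 and 0.0 do
   not, because REAL_VALUES_IDENTICAL distinguishes the sign of
   zero even though -0.0 == 0.0 numerically.  */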
2583 \f
2584 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2585 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2586
2587 When in doubt, return 0. */
2588
2589 static int
2590 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2591 {
2592 int unsignedp1, unsignedpo;
2593 tree primarg0, primarg1, primother;
2594 unsigned int correct_width;
2595
2596 if (operand_equal_p (arg0, arg1, 0))
2597 return 1;
2598
2599 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2600 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2601 return 0;
2602
2603 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2604 and see if the inner values are the same. This removes any
2605 signedness comparison, which doesn't matter here. */
2606 primarg0 = arg0, primarg1 = arg1;
2607 STRIP_NOPS (primarg0);
2608 STRIP_NOPS (primarg1);
2609 if (operand_equal_p (primarg0, primarg1, 0))
2610 return 1;
2611
2612 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2613 actual comparison operand, ARG0.
2614
2615 First throw away any conversions to wider types
2616 already present in the operands. */
2617
2618 primarg1 = get_narrower (arg1, &unsignedp1);
2619 primother = get_narrower (other, &unsignedpo);
2620
2621 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2622 if (unsignedp1 == unsignedpo
2623 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2624 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2625 {
2626 tree type = TREE_TYPE (arg0);
2627
2628 /* Make sure shorter operand is extended the right way
2629 to match the longer operand. */
2630 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2631 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2632
2633 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2634 return 1;
2635 }
2636
2637 return 0;
2638 }
2639 \f
2640 /* See if ARG is an expression that is either a comparison or is performing
2641 arithmetic on comparisons. The comparisons must only be comparing
2642 two different values, which will be stored in *CVAL1 and *CVAL2; if
2643 they are nonzero it means that some operands have already been found.
2644 No variables may be used anywhere else in the expression except in the
2645 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2646 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2647
2648 If this is true, return 1. Otherwise, return zero. */
2649
2650 static int
2651 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2652 {
2653 enum tree_code code = TREE_CODE (arg);
2654 char class = TREE_CODE_CLASS (code);
2655
2656 /* We can handle some of the 'e' cases here. */
2657 if (class == 'e' && code == TRUTH_NOT_EXPR)
2658 class = '1';
2659 else if (class == 'e'
2660 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2661 || code == COMPOUND_EXPR))
2662 class = '2';
2663
2664 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
2665 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2666 {
2667 /* If we've already found a CVAL1 or CVAL2, this expression is
2668 too complex to handle. */
2669 if (*cval1 || *cval2)
2670 return 0;
2671
2672 class = '1';
2673 *save_p = 1;
2674 }
2675
2676 switch (class)
2677 {
2678 case '1':
2679 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2680
2681 case '2':
2682 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2683 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2684 cval1, cval2, save_p));
2685
2686 case 'c':
2687 return 1;
2688
2689 case 'e':
2690 if (code == COND_EXPR)
2691 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2692 cval1, cval2, save_p)
2693 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2694 cval1, cval2, save_p)
2695 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2696 cval1, cval2, save_p));
2697 return 0;
2698
2699 case '<':
2700 /* First see if we can handle the first operand, then the second. For
2701 the second operand, we know *CVAL1 can't be zero. It must be that
2702 one side of the comparison is each of the values; test for the
2703 case where this isn't true by failing if the two operands
2704 are the same. */
2705
2706 if (operand_equal_p (TREE_OPERAND (arg, 0),
2707 TREE_OPERAND (arg, 1), 0))
2708 return 0;
2709
2710 if (*cval1 == 0)
2711 *cval1 = TREE_OPERAND (arg, 0);
2712 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2713 ;
2714 else if (*cval2 == 0)
2715 *cval2 = TREE_OPERAND (arg, 0);
2716 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2717 ;
2718 else
2719 return 0;
2720
2721 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2722 ;
2723 else if (*cval2 == 0)
2724 *cval2 = TREE_OPERAND (arg, 1);
2725 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2726 ;
2727 else
2728 return 0;
2729
2730 return 1;
2731
2732 default:
2733 return 0;
2734 }
2735 }
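
/* For example, given (x < y) && (y == x), this returns 1 with
   *CVAL1 == x and *CVAL2 == y, since every comparison in the
   expression mentions only those two values.  */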
2736 \f
2737 /* ARG is a tree that is known to contain just arithmetic operations and
2738 comparisons. Evaluate the operations in the tree substituting NEW0 for
2739 any occurrence of OLD0 as an operand of a comparison and likewise for
2740 NEW1 and OLD1. */
2741
2742 static tree
2743 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2744 {
2745 tree type = TREE_TYPE (arg);
2746 enum tree_code code = TREE_CODE (arg);
2747 char class = TREE_CODE_CLASS (code);
2748
2749 /* We can handle some of the 'e' cases here. */
2750 if (class == 'e' && code == TRUTH_NOT_EXPR)
2751 class = '1';
2752 else if (class == 'e'
2753 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2754 class = '2';
2755
2756 switch (class)
2757 {
2758 case '1':
2759 return fold (build1 (code, type,
2760 eval_subst (TREE_OPERAND (arg, 0),
2761 old0, new0, old1, new1)));
2762
2763 case '2':
2764 return fold (build2 (code, type,
2765 eval_subst (TREE_OPERAND (arg, 0),
2766 old0, new0, old1, new1),
2767 eval_subst (TREE_OPERAND (arg, 1),
2768 old0, new0, old1, new1)));
2769
2770 case 'e':
2771 switch (code)
2772 {
2773 case SAVE_EXPR:
2774 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2775
2776 case COMPOUND_EXPR:
2777 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2778
2779 case COND_EXPR:
2780 return fold (build3 (code, type,
2781 eval_subst (TREE_OPERAND (arg, 0),
2782 old0, new0, old1, new1),
2783 eval_subst (TREE_OPERAND (arg, 1),
2784 old0, new0, old1, new1),
2785 eval_subst (TREE_OPERAND (arg, 2),
2786 old0, new0, old1, new1)));
2787 default:
2788 break;
2789 }
2790 /* Fall through - ??? */
2791
2792 case '<':
2793 {
2794 tree arg0 = TREE_OPERAND (arg, 0);
2795 tree arg1 = TREE_OPERAND (arg, 1);
2796
2797 /* We need to check both for exact equality and tree equality. The
2798 former will be true if the operand has a side-effect. In that
2799 case, we know the operand occurred exactly once. */
2800
2801 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2802 arg0 = new0;
2803 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2804 arg0 = new1;
2805
2806 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2807 arg1 = new0;
2808 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2809 arg1 = new1;
2810
2811 return fold (build2 (code, type, arg0, arg1));
2812 }
2813
2814 default:
2815 return arg;
2816 }
2817 }
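
/* For example, eval_subst applied to (x < y) && (y < z) with
   OLD0 == x, NEW0 == a, OLD1 == y, NEW1 == b rebuilds and folds
   (a < b) && (b < z).  */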
2818 \f
2819 /* Return a tree for the case when the result of an expression is RESULT
2820 converted to TYPE and OMITTED was previously an operand of the expression
2821 but is now not needed (e.g., we folded OMITTED * 0).
2822
2823 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2824 the conversion of RESULT to TYPE. */
2825
2826 tree
2827 omit_one_operand (tree type, tree result, tree omitted)
2828 {
2829 tree t = fold_convert (type, result);
2830
2831 if (TREE_SIDE_EFFECTS (omitted))
2832 return build2 (COMPOUND_EXPR, type, omitted, t);
2833
2834 return non_lvalue (t);
2835 }
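
/* For example, when folding x * 0 where x has side effects,
   omit_one_operand (type, integer_zero_node, x) produces
   COMPOUND_EXPR <x, 0> so that x is still evaluated; if x has no
   side effects, the result is just the constant 0.  */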
2836
2837 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2838
2839 static tree
2840 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2841 {
2842 tree t = fold_convert (type, result);
2843
2844 if (TREE_SIDE_EFFECTS (omitted))
2845 return build2 (COMPOUND_EXPR, type, omitted, t);
2846
2847 return pedantic_non_lvalue (t);
2848 }
2849
2850 /* Return a tree for the case when the result of an expression is RESULT
2851 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2852 of the expression but are now not needed.
2853
2854 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2855 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2856 evaluated before OMITTED2. Otherwise, if neither has side effects,
2857 just do the conversion of RESULT to TYPE. */
2858
2859 tree
2860 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2861 {
2862 tree t = fold_convert (type, result);
2863
2864 if (TREE_SIDE_EFFECTS (omitted2))
2865 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2866 if (TREE_SIDE_EFFECTS (omitted1))
2867 t = build2 (COMPOUND_EXPR, type, omitted1, t);
2868
2869 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
2870 }
2871
2872 \f
2873 /* Return a simplified tree node for the truth-negation of ARG. This
2874 never alters ARG itself. We assume that ARG is an operation that
2875 returns a truth value (0 or 1).
2876
2877 FIXME: one would think we would fold the result, but it causes
2878 problems with the dominator optimizer. */
2879 tree
2880 invert_truthvalue (tree arg)
2881 {
2882 tree type = TREE_TYPE (arg);
2883 enum tree_code code = TREE_CODE (arg);
2884
2885 if (code == ERROR_MARK)
2886 return arg;
2887
2888 /* If this is a comparison, we can simply invert it, except for
2889 floating-point non-equality comparisons, in which case we just
2890 enclose a TRUTH_NOT_EXPR around what we have. */
2891
2892 if (TREE_CODE_CLASS (code) == '<')
2893 {
2894 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
2895 if (FLOAT_TYPE_P (op_type)
2896 && flag_trapping_math
2897 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2898 && code != NE_EXPR && code != EQ_EXPR)
2899 return build1 (TRUTH_NOT_EXPR, type, arg);
2900 else
2901 {
2902 code = invert_tree_comparison (code,
2903 HONOR_NANS (TYPE_MODE (op_type)));
2904 if (code == ERROR_MARK)
2905 return build1 (TRUTH_NOT_EXPR, type, arg);
2906 else
2907 return build2 (code, type,
2908 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2909 }
2910 }
2911
2912 switch (code)
2913 {
2914 case INTEGER_CST:
2915 return fold_convert (type, build_int_2 (integer_zerop (arg), 0));
2916
2917 case TRUTH_AND_EXPR:
2918 return build2 (TRUTH_OR_EXPR, type,
2919 invert_truthvalue (TREE_OPERAND (arg, 0)),
2920 invert_truthvalue (TREE_OPERAND (arg, 1)));
2921
2922 case TRUTH_OR_EXPR:
2923 return build2 (TRUTH_AND_EXPR, type,
2924 invert_truthvalue (TREE_OPERAND (arg, 0)),
2925 invert_truthvalue (TREE_OPERAND (arg, 1)));
2926
2927 case TRUTH_XOR_EXPR:
2928 /* Here we can invert either operand. We invert the first operand
2929 unless the second operand is a TRUTH_NOT_EXPR in which case our
2930 result is the XOR of the first operand with the inside of the
2931 negation of the second operand. */
2932
2933 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2934 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2935 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2936 else
2937 return build2 (TRUTH_XOR_EXPR, type,
2938 invert_truthvalue (TREE_OPERAND (arg, 0)),
2939 TREE_OPERAND (arg, 1));
2940
2941 case TRUTH_ANDIF_EXPR:
2942 return build2 (TRUTH_ORIF_EXPR, type,
2943 invert_truthvalue (TREE_OPERAND (arg, 0)),
2944 invert_truthvalue (TREE_OPERAND (arg, 1)));
2945
2946 case TRUTH_ORIF_EXPR:
2947 return build2 (TRUTH_ANDIF_EXPR, type,
2948 invert_truthvalue (TREE_OPERAND (arg, 0)),
2949 invert_truthvalue (TREE_OPERAND (arg, 1)));
2950
2951 case TRUTH_NOT_EXPR:
2952 return TREE_OPERAND (arg, 0);
2953
2954 case COND_EXPR:
2955 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
2956 invert_truthvalue (TREE_OPERAND (arg, 1)),
2957 invert_truthvalue (TREE_OPERAND (arg, 2)));
2958
2959 case COMPOUND_EXPR:
2960 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2961 invert_truthvalue (TREE_OPERAND (arg, 1)));
2962
2963 case NON_LVALUE_EXPR:
2964 return invert_truthvalue (TREE_OPERAND (arg, 0));
2965
2966 case NOP_EXPR:
2967 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
2968 break;
2969
2970 case CONVERT_EXPR:
2971 case FLOAT_EXPR:
2972 return build1 (TREE_CODE (arg), type,
2973 invert_truthvalue (TREE_OPERAND (arg, 0)));
2974
2975 case BIT_AND_EXPR:
2976 if (!integer_onep (TREE_OPERAND (arg, 1)))
2977 break;
2978 return build2 (EQ_EXPR, type, arg,
2979 fold_convert (type, integer_zero_node));
2980
2981 case SAVE_EXPR:
2982 return build1 (TRUTH_NOT_EXPR, type, arg);
2983
2984 case CLEANUP_POINT_EXPR:
2985 return build1 (CLEANUP_POINT_EXPR, type,
2986 invert_truthvalue (TREE_OPERAND (arg, 0)));
2987
2988 default:
2989 break;
2990 }
2991 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2992 abort ();
2993 return build1 (TRUTH_NOT_EXPR, type, arg);
2994 }
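
/* For example, inverting a && b builds !a || !b by De Morgan's
   law, recursively inverting each operand, and inverting (x & 1)
   builds (x & 1) == 0.  */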
2995
2996 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2997 operands are another bit-wise operation with a common input. If so,
2998 distribute the bit operations to save an operation and possibly two if
2999 constants are involved. For example, convert
3000 (A | B) & (A | C) into A | (B & C)
3001 Further simplification will occur if B and C are constants.
3002
3003 If this optimization cannot be done, 0 will be returned. */
3004
3005 static tree
3006 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3007 {
3008 tree common;
3009 tree left, right;
3010
3011 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3012 || TREE_CODE (arg0) == code
3013 || (TREE_CODE (arg0) != BIT_AND_EXPR
3014 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3015 return 0;
3016
3017 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3018 {
3019 common = TREE_OPERAND (arg0, 0);
3020 left = TREE_OPERAND (arg0, 1);
3021 right = TREE_OPERAND (arg1, 1);
3022 }
3023 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3024 {
3025 common = TREE_OPERAND (arg0, 0);
3026 left = TREE_OPERAND (arg0, 1);
3027 right = TREE_OPERAND (arg1, 0);
3028 }
3029 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3030 {
3031 common = TREE_OPERAND (arg0, 1);
3032 left = TREE_OPERAND (arg0, 0);
3033 right = TREE_OPERAND (arg1, 1);
3034 }
3035 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3036 {
3037 common = TREE_OPERAND (arg0, 1);
3038 left = TREE_OPERAND (arg0, 0);
3039 right = TREE_OPERAND (arg1, 0);
3040 }
3041 else
3042 return 0;
3043
3044 return fold (build2 (TREE_CODE (arg0), type, common,
3045 fold (build2 (code, type, left, right))));
3046 }
3047 \f
3048 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3049 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3050
3051 static tree
3052 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3053 int unsignedp)
3054 {
3055 tree result = build3 (BIT_FIELD_REF, type, inner,
3056 size_int (bitsize), bitsize_int (bitpos));
3057
3058 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3059
3060 return result;
3061 }
3062
3063 /* Optimize a bit-field compare.
3064
3065 There are two cases: First is a compare against a constant and the
3066 second is a comparison of two items where the fields are at the same
3067 bit position relative to the start of a chunk (byte, halfword, word)
3068 large enough to contain it. In these cases we can avoid the shift
3069 implicit in bitfield extractions.
3070
3071 For constants, we emit a compare of the shifted constant with the
3072 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3073 compared. For two fields at the same position, we do the ANDs with the
3074 similar mask and compare the result of the ANDs.
3075
3076 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3077 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3078 are the left and right operands of the comparison, respectively.
3079
3080 If the optimization described above can be done, we return the resulting
3081 tree. Otherwise we return zero. */
3082
3083 static tree
3084 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3085 tree lhs, tree rhs)
3086 {
3087 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3088 tree type = TREE_TYPE (lhs);
3089 tree signed_type, unsigned_type;
3090 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3091 enum machine_mode lmode, rmode, nmode;
3092 int lunsignedp, runsignedp;
3093 int lvolatilep = 0, rvolatilep = 0;
3094 tree linner, rinner = NULL_TREE;
3095 tree mask;
3096 tree offset;
3097
3098 /* Get all the information about the extractions being done. If the bit size
3099 is the same as the size of the underlying object, we aren't doing an
3100 extraction at all and so can do nothing. We also don't want to
3101 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3102 then will no longer be able to replace it. */
3103 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3104 &lunsignedp, &lvolatilep);
3105 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3106 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3107 return 0;
3108
3109 if (!const_p)
3110 {
3111 /* If this is not a constant, we can only do something if bit positions,
3112 sizes, and signedness are the same. */
3113 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3114 &runsignedp, &rvolatilep);
3115
3116 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3117 || lunsignedp != runsignedp || offset != 0
3118 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3119 return 0;
3120 }
3121
3122 /* See if we can find a mode to refer to this field. We should be able to,
3123 but fail if we can't. */
3124 nmode = get_best_mode (lbitsize, lbitpos,
3125 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3126 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3127 TYPE_ALIGN (TREE_TYPE (rinner))),
3128 word_mode, lvolatilep || rvolatilep);
3129 if (nmode == VOIDmode)
3130 return 0;
3131
3132 /* Set signed and unsigned types of the precision of this mode for the
3133 shifts below. */
3134 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3135 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3136
3137 /* Compute the bit position and size for the new reference and our offset
3138 within it. If the new reference is the same size as the original, we
3139 won't optimize anything, so return zero. */
3140 nbitsize = GET_MODE_BITSIZE (nmode);
3141 nbitpos = lbitpos & ~ (nbitsize - 1);
3142 lbitpos -= nbitpos;
3143 if (nbitsize == lbitsize)
3144 return 0;
3145
3146 if (BYTES_BIG_ENDIAN)
3147 lbitpos = nbitsize - lbitsize - lbitpos;
3148
3149 /* Make the mask to be used against the extracted field. */
3150 mask = build_int_2 (~0, ~0);
3151 TREE_TYPE (mask) = unsigned_type;
3152 force_fit_type (mask, 0);
3153 mask = fold_convert (unsigned_type, mask);
3154 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3155 mask = const_binop (RSHIFT_EXPR, mask,
3156 size_int (nbitsize - lbitsize - lbitpos), 0);
3157
3158 if (! const_p)
3159 /* If not comparing with constant, just rework the comparison
3160 and return. */
3161 return build2 (code, compare_type,
3162 build2 (BIT_AND_EXPR, unsigned_type,
3163 make_bit_field_ref (linner, unsigned_type,
3164 nbitsize, nbitpos, 1),
3165 mask),
3166 build2 (BIT_AND_EXPR, unsigned_type,
3167 make_bit_field_ref (rinner, unsigned_type,
3168 nbitsize, nbitpos, 1),
3169 mask));
3170
3171 /* Otherwise, we are handling the constant case. See if the constant is too
3172 big for the field. Warn and return a tree for 0 (false) if so. We do
3173 this not only for its own sake, but to avoid having to test for this
3174 error case below. If we didn't, we might generate wrong code.
3175
3176 For unsigned fields, the constant shifted right by the field length should
3177 be all zero. For signed fields, the high-order bits should agree with
3178 the sign bit. */
3179
3180 if (lunsignedp)
3181 {
3182 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3183 fold_convert (unsigned_type, rhs),
3184 size_int (lbitsize), 0)))
3185 {
3186 warning ("comparison is always %d due to width of bit-field",
3187 code == NE_EXPR);
3188 return constant_boolean_node (code == NE_EXPR, compare_type);
3189 }
3190 }
3191 else
3192 {
3193 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3194 size_int (lbitsize - 1), 0);
3195 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3196 {
3197 warning ("comparison is always %d due to width of bit-field",
3198 code == NE_EXPR);
3199 return constant_boolean_node (code == NE_EXPR, compare_type);
3200 }
3201 }
3202
3203 /* Single-bit compares should always be against zero. */
3204 if (lbitsize == 1 && ! integer_zerop (rhs))
3205 {
3206 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3207 rhs = fold_convert (type, integer_zero_node);
3208 }
3209
3210 /* Make a new bit-field reference for the left-hand operand, shift
3211 the constant over the appropriate number of bits and mask it with
3212 the computed mask (in case this was a signed field). */
3213 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3214 if (lvolatilep)
3215 {
3216 TREE_SIDE_EFFECTS (lhs) = 1;
3217 TREE_THIS_VOLATILE (lhs) = 1;
3218 }
3219
3220 rhs = fold (const_binop (BIT_AND_EXPR,
3221 const_binop (LSHIFT_EXPR,
3222 fold_convert (unsigned_type, rhs),
3223 size_int (lbitpos), 0),
3224 mask, 0));
3225
3226 return build2 (code, compare_type,
3227 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3228 rhs);
3229 }
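
/* For example, given struct S { unsigned f : 3; } s, the
   comparison s.f == 5 becomes a load of the mode-sized unit
   containing F, masked with the field mask and compared against 5
   shifted into the field's position, avoiding the extraction
   shift.  Comparing s.f == 9 instead warns and folds to constant
   false, since 9 does not fit in 3 bits.  */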
3230 \f
3231 /* Subroutine for fold_truthop: decode a field reference.
3232
3233 If EXP is a comparison reference, we return the innermost reference.
3234
3235 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3236 set to the starting bit number.
3237
3238 If the innermost field can be completely contained in a mode-sized
3239 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3240
3241 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3242 otherwise it is not changed.
3243
3244 *PUNSIGNEDP is set to the signedness of the field.
3245
3246 *PMASK is set to the mask used. This is either contained in a
3247 BIT_AND_EXPR or derived from the width of the field.
3248
3249 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3250
3251 Return 0 if this is not a component reference or is one that we can't
3252 do anything with. */
3253
3254 static tree
3255 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3256 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3257 int *punsignedp, int *pvolatilep,
3258 tree *pmask, tree *pand_mask)
3259 {
3260 tree outer_type = 0;
3261 tree and_mask = 0;
3262 tree mask, inner, offset;
3263 tree unsigned_type;
3264 unsigned int precision;
3265
3266 /* All the optimizations using this function assume integer fields.
3267 There are problems with FP fields since the type_for_size call
3268 below can fail for, e.g., XFmode. */
3269 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3270 return 0;
3271
3272 /* We are interested in the bare arrangement of bits, so strip everything
3273 that doesn't affect the machine mode. However, record the type of the
3274 outermost expression if it may matter below. */
3275 if (TREE_CODE (exp) == NOP_EXPR
3276 || TREE_CODE (exp) == CONVERT_EXPR
3277 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3278 outer_type = TREE_TYPE (exp);
3279 STRIP_NOPS (exp);
3280
3281 if (TREE_CODE (exp) == BIT_AND_EXPR)
3282 {
3283 and_mask = TREE_OPERAND (exp, 1);
3284 exp = TREE_OPERAND (exp, 0);
3285 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3286 if (TREE_CODE (and_mask) != INTEGER_CST)
3287 return 0;
3288 }
3289
3290 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3291 punsignedp, pvolatilep);
3292 if ((inner == exp && and_mask == 0)
3293 || *pbitsize < 0 || offset != 0
3294 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3295 return 0;
3296
3297 /* If the number of bits in the reference is the same as the bitsize of
3298 the outer type, then the outer type gives the signedness. Otherwise
3299 (in case of a small bitfield) the signedness is unchanged. */
3300 if (outer_type && *pbitsize == tree_low_cst (TYPE_SIZE (outer_type), 1))
3301 *punsignedp = TYPE_UNSIGNED (outer_type);
3302
3303 /* Compute the mask to access the bitfield. */
3304 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3305 precision = TYPE_PRECISION (unsigned_type);
3306
3307 mask = build_int_2 (~0, ~0);
3308 TREE_TYPE (mask) = unsigned_type;
3309 force_fit_type (mask, 0);
3310 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3311 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3312
3313 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3314 if (and_mask != 0)
3315 mask = fold (build2 (BIT_AND_EXPR, unsigned_type,
3316 fold_convert (unsigned_type, and_mask), mask));
3317
3318 *pmask = mask;
3319 *pand_mask = and_mask;
3320 return inner;
3321 }
3322
3323 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3324 bit positions. */
3325
3326 static int
3327 all_ones_mask_p (tree mask, int size)
3328 {
3329 tree type = TREE_TYPE (mask);
3330 unsigned int precision = TYPE_PRECISION (type);
3331 tree tmask;
3332
3333 tmask = build_int_2 (~0, ~0);
3334 TREE_TYPE (tmask) = lang_hooks.types.signed_type (type);
3335 force_fit_type (tmask, 0);
3336 return
3337 tree_int_cst_equal (mask,
3338 const_binop (RSHIFT_EXPR,
3339 const_binop (LSHIFT_EXPR, tmask,
3340 size_int (precision - size),
3341 0),
3342 size_int (precision - size), 0));
3343 }
3344
3345 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3346 represents the sign bit of EXP's type. If EXP represents a sign
3347 or zero extension, also test VAL against the unextended type.
3348 The return value is the (sub)expression whose sign bit is VAL,
3349 or NULL_TREE otherwise. */
3350
3351 static tree
3352 sign_bit_p (tree exp, tree val)
3353 {
3354 unsigned HOST_WIDE_INT mask_lo, lo;
3355 HOST_WIDE_INT mask_hi, hi;
3356 int width;
3357 tree t;
3358
3359 /* Tree EXP must have an integral type. */
3360 t = TREE_TYPE (exp);
3361 if (! INTEGRAL_TYPE_P (t))
3362 return NULL_TREE;
3363
3364 /* Tree VAL must be an integer constant. */
3365 if (TREE_CODE (val) != INTEGER_CST
3366 || TREE_CONSTANT_OVERFLOW (val))
3367 return NULL_TREE;
3368
3369 width = TYPE_PRECISION (t);
3370 if (width > HOST_BITS_PER_WIDE_INT)
3371 {
3372 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3373 lo = 0;
3374
3375 mask_hi = ((unsigned HOST_WIDE_INT) -1
3376 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3377 mask_lo = -1;
3378 }
3379 else
3380 {
3381 hi = 0;
3382 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3383
3384 mask_hi = 0;
3385 mask_lo = ((unsigned HOST_WIDE_INT) -1
3386 >> (HOST_BITS_PER_WIDE_INT - width));
3387 }
3388
3389 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3390 treat VAL as if it were unsigned. */
3391 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3392 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3393 return exp;
3394
3395 /* Handle extension from a narrower type. */
3396 if (TREE_CODE (exp) == NOP_EXPR
3397 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3398 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3399
3400 return NULL_TREE;
3401 }
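
/* For example, for EXP of a 32-bit signed type, sign_bit_p returns
   EXP when VAL is 0x80000000; for (int) c, where c has an 8-bit
   type, VAL is also tested against 0x80, the sign bit of the
   narrower type.  */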
3402
3403 /* Subroutine for fold_truthop: determine if an operand is simple enough
3404 to be evaluated unconditionally. */
3405
3406 static int
3407 simple_operand_p (tree exp)
3408 {
3409 /* Strip any conversions that don't change the machine mode. */
3410 while ((TREE_CODE (exp) == NOP_EXPR
3411 || TREE_CODE (exp) == CONVERT_EXPR)
3412 && (TYPE_MODE (TREE_TYPE (exp))
3413 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
3414 exp = TREE_OPERAND (exp, 0);
3415
3416 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
3417 || (DECL_P (exp)
3418 && ! TREE_ADDRESSABLE (exp)
3419 && ! TREE_THIS_VOLATILE (exp)
3420 && ! DECL_NONLOCAL (exp)
3421 /* Don't regard global variables as simple. They may be
3422 allocated in ways unknown to the compiler (shared memory,
3423 #pragma weak, etc). */
3424 && ! TREE_PUBLIC (exp)
3425 && ! DECL_EXTERNAL (exp)
3426 /* Loading a static variable is unduly expensive, but global
3427 registers aren't expensive. */
3428 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3429 }
3430 \f
3431 /* The following functions are subroutines to fold_range_test and allow it to
3432 try to change a logical combination of comparisons into a range test.
3433
3434 For example, both
3435 X == 2 || X == 3 || X == 4 || X == 5
3436 and
3437 X >= 2 && X <= 5
3438 are converted to
3439 (unsigned) (X - 2) <= 3
3440
3441 We describe each set of comparisons as being either inside or outside
3442 a range, using a variable named like IN_P, and then describe the
3443 range with a lower and upper bound. If one of the bounds is omitted,
3444 it represents either the highest or lowest value of the type.
3445
3446 In the comments below, we represent a range by two numbers in brackets
3447 preceded by a "+" to designate being inside that range, or a "-" to
3448 designate being outside that range, so the condition can be inverted by
3449 flipping the prefix. An omitted bound is represented by a "-". For
3450 example, "- [-, 10]" means being outside the range starting at the lowest
3451 possible value and ending at 10, in other words, being greater than 10.
3452 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3453 always false.
3454
3455 We set up things so that the missing bounds are handled in a consistent
3456 manner so neither a missing bound nor "true" and "false" need to be
3457 handled using a special case. */
3458
3459 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3460 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3461 and UPPER1_P are nonzero if the respective argument is an upper bound
3462 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3463 must be specified for a comparison. ARG1 will be converted to ARG0's
3464 type if both are specified. */
3465
3466 static tree
3467 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3468 tree arg1, int upper1_p)
3469 {
3470 tree tem;
3471 int result;
3472 int sgn0, sgn1;
3473
3474 /* If neither arg represents infinity, do the normal operation.
3475 Else, if not a comparison, return infinity. Else handle the special
3476 comparison rules. Note that most of the cases below won't occur, but
3477 are handled for consistency. */
3478
3479 if (arg0 != 0 && arg1 != 0)
3480 {
3481 tem = fold (build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3482 arg0, fold_convert (TREE_TYPE (arg0), arg1)));
3483 STRIP_NOPS (tem);
3484 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3485 }
3486
3487 if (TREE_CODE_CLASS (code) != '<')
3488 return 0;
3489
3490 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3491 for neither. In real mathematics we could not assume that two open
3492 ends are the same. But this is computer arithmetic, where numbers
3493 are finite, so any unbounded end may be replaced by a value Z that
3494 is greater than every representable number. This permits us to
3495 treat unbounded ranges as equal. */
3496 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3497 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3498 switch (code)
3499 {
3500 case EQ_EXPR:
3501 result = sgn0 == sgn1;
3502 break;
3503 case NE_EXPR:
3504 result = sgn0 != sgn1;
3505 break;
3506 case LT_EXPR:
3507 result = sgn0 < sgn1;
3508 break;
3509 case LE_EXPR:
3510 result = sgn0 <= sgn1;
3511 break;
3512 case GT_EXPR:
3513 result = sgn0 > sgn1;
3514 break;
3515 case GE_EXPR:
3516 result = sgn0 >= sgn1;
3517 break;
3518 default:
3519 abort ();
3520 }
3521
3522 return constant_boolean_node (result, type);
3523 }
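
/* For example, range_binop (LT_EXPR, type, arg0, 0, NULL_TREE, 1)
   with a missing second argument treats that argument as an upper
   bound of +infinity (SGN1 == 1), so any bounded ARG0 compares
   less than it and the result is the constant true node.  */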
3524 \f
3525 /* Given EXP, a logical expression, set the range it is testing into
3526 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3527 actually being tested. *PLOW and *PHIGH will be made of the same type
3528 as the returned expression. If EXP is not a comparison, we will most
3529 likely not be returning a useful value and range. */
3530
3531 static tree
3532 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3533 {
3534 enum tree_code code;
3535 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3536 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3537 int in_p, n_in_p;
3538 tree low, high, n_low, n_high;
3539
3540 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3541 and see if we can refine the range. Some of the cases below may not
3542 happen, but it doesn't seem worth worrying about this. We "continue"
3543 the outer loop when we've changed something; otherwise we "break"
3544 the switch, which will "break" the while. */
3545
3546 in_p = 0;
3547 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3548
3549 while (1)
3550 {
3551 code = TREE_CODE (exp);
3552 exp_type = TREE_TYPE (exp);
3553
3554 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3555 {
3556 if (first_rtl_op (code) > 0)
3557 arg0 = TREE_OPERAND (exp, 0);
3558 if (TREE_CODE_CLASS (code) == '<'
3559 || TREE_CODE_CLASS (code) == '1'
3560 || TREE_CODE_CLASS (code) == '2')
3561 arg0_type = TREE_TYPE (arg0);
3562 if (TREE_CODE_CLASS (code) == '2'
3563 || TREE_CODE_CLASS (code) == '<'
3564 || (TREE_CODE_CLASS (code) == 'e'
3565 && TREE_CODE_LENGTH (code) > 1))
3566 arg1 = TREE_OPERAND (exp, 1);
3567 }
3568
3569 switch (code)
3570 {
3571 case TRUTH_NOT_EXPR:
3572 in_p = ! in_p, exp = arg0;
3573 continue;
3574
3575 case EQ_EXPR: case NE_EXPR:
3576 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3577 /* We can only do something if the range is testing for zero
3578 and if the second operand is an integer constant. Note that
3579 saying something is "in" the range we make is done by
3580 complementing IN_P, since it was set for the initial case of
3581 being not equal to zero; "out" leaves it alone. */
3582 if (low == 0 || high == 0
3583 || ! integer_zerop (low) || ! integer_zerop (high)
3584 || TREE_CODE (arg1) != INTEGER_CST)
3585 break;
3586
3587 switch (code)
3588 {
3589 case NE_EXPR: /* - [c, c] */
3590 low = high = arg1;
3591 break;
3592 case EQ_EXPR: /* + [c, c] */
3593 in_p = ! in_p, low = high = arg1;
3594 break;
3595 case GT_EXPR: /* - [-, c] */
3596 low = 0, high = arg1;
3597 break;
3598 case GE_EXPR: /* + [c, -] */
3599 in_p = ! in_p, low = arg1, high = 0;
3600 break;
3601 case LT_EXPR: /* - [c, -] */
3602 low = arg1, high = 0;
3603 break;
3604 case LE_EXPR: /* + [-, c] */
3605 in_p = ! in_p, low = 0, high = arg1;
3606 break;
3607 default:
3608 abort ();
3609 }
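/* A concrete trace (example values only): parsing x > 5 starts from the
   initial "EXP != 0" state, i.e. IN_P == 0 with range [0, 0]; the
   GT_EXPR arm above then does low = 0, high = arg1, leaving IN_P
   alone, which denotes the anti-range - [-, 5]. */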
3610
3611 /* If this is an unsigned comparison, we also know that EXP is
3612 greater than or equal to zero. We base the range tests we make
3613 on that fact, so we record it here to allow existing range
3614 tests to be parsed. We test arg0_type since the return type
3615 of, e.g., EQ_EXPR, is often boolean. */
3616 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3617 {
3618 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
3619 1, fold_convert (arg0_type, integer_zero_node),
3620 NULL_TREE))
3621 break;
3622
3623 in_p = n_in_p, low = n_low, high = n_high;
3624
3625 /* If the high bound is missing, but we have a nonzero low
3626 bound, reverse the range so it goes from zero to the low bound
3627 minus 1. */
3628 if (high == 0 && low && ! integer_zerop (low))
3629 {
3630 in_p = ! in_p;
3631 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3632 integer_one_node, 0);
3633 low = fold_convert (arg0_type, integer_zero_node);
3634 }
3635 }
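/* For instance (hypothetical 8-bit unsigned x): x >= 10 first gives
   + [10, -]; merging with "x >= 0" leaves + [10, -], and since the
   high bound is missing while the low bound is nonzero, the range is
   reversed to the equivalent - [0, 9] with both bounds known. */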
3636
3637 exp = arg0;
3638 continue;
3639
3640 case NEGATE_EXPR:
3641 /* (-x) IN [a,b] -> x in [-b, -a] */
3642 n_low = range_binop (MINUS_EXPR, exp_type,
3643 fold_convert (exp_type, integer_zero_node),
3644 0, high, 1);
3645 n_high = range_binop (MINUS_EXPR, exp_type,
3646 fold_convert (exp_type, integer_zero_node),
3647 0, low, 0);
3648 low = n_low, high = n_high;
3649 exp = arg0;
3650 continue;
3651
3652 case BIT_NOT_EXPR:
3653 /* ~ X -> -X - 1 */
3654 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3655 fold_convert (exp_type, integer_one_node));
3656 continue;
3657
3658 case PLUS_EXPR: case MINUS_EXPR:
3659 if (TREE_CODE (arg1) != INTEGER_CST)
3660 break;
3661
3662 /* If EXP is signed, any overflow in the computation is undefined,
3663 so we don't worry about it so long as our computations on
3664 the bounds don't overflow. For unsigned, overflow is defined
3665 and this is exactly the right thing. */
3666 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3667 arg0_type, low, 0, arg1, 0);
3668 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3669 arg0_type, high, 1, arg1, 0);
3670 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3671 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3672 break;
3673
3674 /* Check for an unsigned range which has wrapped around the maximum
3675 value, thus making n_high < n_low, and normalize it. */
3676 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3677 {
3678 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3679 integer_one_node, 0);
3680 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3681 integer_one_node, 0);
3682
3683 /* If the range is of the form +/- [ x+1, x ], we won't
3684 be able to normalize it. But then, it represents the
3685 whole range or the empty set, so make it
3686 +/- [ -, - ]. */
3687 if (tree_int_cst_equal (n_low, low)
3688 && tree_int_cst_equal (n_high, high))
3689 low = high = 0;
3690 else
3691 in_p = ! in_p;
3692 }
3693 else
3694 low = n_low, high = n_high;
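/* Worked example of the wrap-around case (assumed 32-bit unsigned x):
   for x + 10 in [5, 15], code PLUS_EXPR gives n_low = 5 - 10 ==
   0xfffffffb and n_high = 15 - 10 == 5, so n_high < n_low; the
   normalization above turns this into the complement - [6, 0xfffffffa]. */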
3695
3696 exp = arg0;
3697 continue;
3698
3699 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3700 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3701 break;
3702
3703 if (! INTEGRAL_TYPE_P (arg0_type)
3704 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3705 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3706 break;
3707
3708 n_low = low, n_high = high;
3709
3710 if (n_low != 0)
3711 n_low = fold_convert (arg0_type, n_low);
3712
3713 if (n_high != 0)
3714 n_high = fold_convert (arg0_type, n_high);
3715
3717 /* If we're converting arg0 from an unsigned type to exp,
3718 a signed type, we will be doing the comparison as unsigned.
3719 The tests above have already verified that LOW and HIGH
3720 are both positive.
3721
3722 So we have to ensure that we will handle large unsigned
3723 values the same way that the current signed bounds treat
3724 negative values. */
3725
3726 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3727 {
3728 tree high_positive;
3729 tree equiv_type = lang_hooks.types.type_for_mode
3730 (TYPE_MODE (arg0_type), 1);
3731
3732 /* A range without an upper bound is, naturally, unbounded.
3733 Since convert would have cropped a very large value, use
3734 the max value for the destination type. */
3735 high_positive
3736 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3737 : TYPE_MAX_VALUE (arg0_type);
3738
3739 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3740 high_positive = fold (build2 (RSHIFT_EXPR, arg0_type,
3741 fold_convert (arg0_type,
3742 high_positive),
3743 fold_convert (arg0_type,
3744 integer_one_node)));
3745
3746 /* If the low bound is specified, "and" the range with the
3747 range for which the original unsigned value will be
3748 positive. */
3749 if (low != 0)
3750 {
3751 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3752 1, n_low, n_high, 1,
3753 fold_convert (arg0_type, integer_zero_node),
3754 high_positive))
3755 break;
3756
3757 in_p = (n_in_p == in_p);
3758 }
3759 else
3760 {
3761 /* Otherwise, "or" the range with the range of the input
3762 that will be interpreted as negative. */
3763 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3764 0, n_low, n_high, 1,
3765 fold_convert (arg0_type, integer_zero_node),
3766 high_positive))
3767 break;
3768
3769 in_p = (in_p != n_in_p);
3770 }
3771 }
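/* A sketch of the effect (assumed types, not from the code above): let
   exp have type signed char and arg0 type unsigned char. Then
   high_positive is 255 >> 1 == 127, and a low-bounded range such as
   + [1, 100] on exp is intersected with + [0, 127], so values of arg0
   above 127, which exp would interpret as negative, stay excluded. */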
3772
3773 exp = arg0;
3774 low = n_low, high = n_high;
3775 continue;
3776
3777 default:
3778 break;
3779 }
3780
3781 break;
3782 }
3783
3784 /* If EXP is a constant, we can evaluate whether this is true or false. */
3785 if (TREE_CODE (exp) == INTEGER_CST)
3786 {
3787 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3788 exp, 0, low, 0))
3789 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3790 exp, 1, high, 1)));
3791 low = high = 0;
3792 exp = 0;
3793 }
3794
3795 *pin_p = in_p, *plow = low, *phigh = high;
3796 return exp;
3797 }
3798 \f
3799 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3800 type, TYPE, return an expression to test if EXP is in (or out of, depending
3801 on IN_P) the range. Return 0 if the test couldn't be created. */
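/* For example (illustrative bounds): a check for EXP in [3, 10] falls
   through to the subtraction at the end of this function and becomes
   the single unsigned comparison (EXP - 3) <= 7. */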
3802
3803 static tree
3804 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3805 {
3806 tree etype = TREE_TYPE (exp);
3807 tree value;
3808
3809 if (! in_p)
3810 {
3811 value = build_range_check (type, exp, 1, low, high);
3812 if (value != 0)
3813 return invert_truthvalue (value);
3814
3815 return 0;
3816 }
3817
3818 if (low == 0 && high == 0)
3819 return fold_convert (type, integer_one_node);
3820
3821 if (low == 0)
3822 return fold (build2 (LE_EXPR, type, exp, high));
3823
3824 if (high == 0)
3825 return fold (build2 (GE_EXPR, type, exp, low));
3826
3827 if (operand_equal_p (low, high, 0))
3828 return fold (build2 (EQ_EXPR, type, exp, low));
3829
3830 if (integer_zerop (low))
3831 {
3832 if (! TYPE_UNSIGNED (etype))
3833 {
3834 etype = lang_hooks.types.unsigned_type (etype);
3835 high = fold_convert (etype, high);
3836 exp = fold_convert (etype, exp);
3837 }
3838 return build_range_check (type, exp, 1, 0, high);
3839 }
3840
3841 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3842 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3843 {
3844 unsigned HOST_WIDE_INT lo;
3845 HOST_WIDE_INT hi;
3846 int prec;
3847
3848 prec = TYPE_PRECISION (etype);
3849 if (prec <= HOST_BITS_PER_WIDE_INT)
3850 {
3851 hi = 0;
3852 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3853 }
3854 else
3855 {
3856 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3857 lo = (unsigned HOST_WIDE_INT) -1;
3858 }
3859
3860 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3861 {
3862 if (TYPE_UNSIGNED (etype))
3863 {
3864 etype = lang_hooks.types.signed_type (etype);
3865 exp = fold_convert (etype, exp);
3866 }
3867 return fold (build2 (GT_EXPR, type, exp,
3868 fold_convert (etype, integer_zero_node)));
3869 }
3870 }
3871
3872 value = const_binop (MINUS_EXPR, high, low, 0);
3873 if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
3874 {
3875 tree utype, minv, maxv;
3876
3877 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
3878 for the type in question, as we rely on this here. */
3879 switch (TREE_CODE (etype))
3880 {
3881 case INTEGER_TYPE:
3882 case ENUMERAL_TYPE:
3883 case CHAR_TYPE:
3884 utype = lang_hooks.types.unsigned_type (etype);
3885 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
3886 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
3887 integer_one_node, 1);
3888 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
3889 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
3890 minv, 1, maxv, 1)))
3891 {
3892 etype = utype;
3893 high = fold_convert (etype, high);
3894 low = fold_convert (etype, low);
3895 exp = fold_convert (etype, exp);
3896 value = const_binop (MINUS_EXPR, high, low, 0);
3897 }
3898 break;
3899 default:
3900 break;
3901 }
3902 }
3903
3904 if (value != 0 && ! TREE_OVERFLOW (value))
3905 return build_range_check (type,
3906 fold (build2 (MINUS_EXPR, etype, exp, low)),
3907 1, fold_convert (etype, integer_zero_node),
3908 value);
3909
3910 return 0;
3911 }
3912 \f
3913 /* Given two ranges, see if we can merge them into one. Return 1 if we
3914 can, 0 if we can't. Set the output range into the specified parameters. */
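/* For instance (example ranges only): merging + [0, 10] with + [5, 20]
   yields the conjunction + [5, 10]. Callers that need a disjunction,
   such as fold_range_test, invert IN_P on both sides before merging
   and invert the result afterwards. */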
3915
3916 static int
3917 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3918 tree high0, int in1_p, tree low1, tree high1)
3919 {
3920 int no_overlap;
3921 int subset;
3922 int temp;
3923 tree tem;
3924 int in_p;
3925 tree low, high;
3926 int lowequal = ((low0 == 0 && low1 == 0)
3927 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3928 low0, 0, low1, 0)));
3929 int highequal = ((high0 == 0 && high1 == 0)
3930 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3931 high0, 1, high1, 1)));
3932
3933 /* Make range 0 be the range that starts first, or ends last if they
3934 start at the same value. Swap them if that is not the case. */
3935 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3936 low0, 0, low1, 0))
3937 || (lowequal
3938 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3939 high1, 1, high0, 1))))
3940 {
3941 temp = in0_p, in0_p = in1_p, in1_p = temp;
3942 tem = low0, low0 = low1, low1 = tem;
3943 tem = high0, high0 = high1, high1 = tem;
3944 }
3945
3946 /* Now flag two cases, whether the ranges are disjoint or whether the
3947 second range is totally subsumed in the first. Note that the tests
3948 below are simplified by the ones above. */
3949 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3950 high0, 1, low1, 0));
3951 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3952 high1, 1, high0, 1));
3953
3954 /* We now have four cases, depending on whether we are including or
3955 excluding the two ranges. */
3956 if (in0_p && in1_p)
3957 {
3958 /* If they don't overlap, the result is false. If the second range
3959 is a subset, it is the result. Otherwise, the range is from the start
3960 of the second to the end of the first. */
3961 if (no_overlap)
3962 in_p = 0, low = high = 0;
3963 else if (subset)
3964 in_p = 1, low = low1, high = high1;
3965 else
3966 in_p = 1, low = low1, high = high0;
3967 }
3968
3969 else if (in0_p && ! in1_p)
3970 {
3971 /* If they don't overlap, the result is the first range. If they are
3972 equal, the result is false. If the second range is a subset of the
3973 first, and the ranges begin at the same place, we go from just after
3974 the end of the first range to the end of the second. If the second
3975 range is not a subset of the first, or if it is a subset and both
3976 ranges end at the same place, the range starts at the start of the
3977 first range and ends just before the second range.
3978 Otherwise, we can't describe this as a single range. */
3979 if (no_overlap)
3980 in_p = 1, low = low0, high = high0;
3981 else if (lowequal && highequal)
3982 in_p = 0, low = high = 0;
3983 else if (subset && lowequal)
3984 {
3985 in_p = 1, high = high0;
3986 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3987 integer_one_node, 0);
3988 }
3989 else if (! subset || highequal)
3990 {
3991 in_p = 1, low = low0;
3992 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3993 integer_one_node, 0);
3994 }
3995 else
3996 return 0;
3997 }
3998
3999 else if (! in0_p && in1_p)
4000 {
4001 /* If they don't overlap, the result is the second range. If the second
4002 is a subset of the first, the result is false. Otherwise,
4003 the range starts just after the first range and ends at the
4004 end of the second. */
4005 if (no_overlap)
4006 in_p = 1, low = low1, high = high1;
4007 else if (subset || highequal)
4008 in_p = 0, low = high = 0;
4009 else
4010 {
4011 in_p = 1, high = high1;
4012 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4013 integer_one_node, 0);
4014 }
4015 }
4016
4017 else
4018 {
4019 /* The case where we are excluding both ranges. Here the complex case
4020 is if they don't overlap. In that case, the only time we have a
4021 range is if they are adjacent. If the second is a subset of the
4022 first, the result is the first. Otherwise, the range to exclude
4023 starts at the beginning of the first range and ends at the end of the
4024 second. */
4025 if (no_overlap)
4026 {
4027 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4028 range_binop (PLUS_EXPR, NULL_TREE,
4029 high0, 1,
4030 integer_one_node, 1),
4031 1, low1, 0)))
4032 in_p = 0, low = low0, high = high1;
4033 else
4034 {
4035 /* Canonicalize - [min, x] into - [-, x]. */
4036 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4037 switch (TREE_CODE (TREE_TYPE (low0)))
4038 {
4039 case ENUMERAL_TYPE:
4040 if (TYPE_PRECISION (TREE_TYPE (low0))
4041 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4042 break;
4043 /* FALLTHROUGH */
4044 case INTEGER_TYPE:
4045 case CHAR_TYPE:
4046 if (tree_int_cst_equal (low0,
4047 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4048 low0 = 0;
4049 break;
4050 case POINTER_TYPE:
4051 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4052 && integer_zerop (low0))
4053 low0 = 0;
4054 break;
4055 default:
4056 break;
4057 }
4058
4059 /* Canonicalize - [x, max] into - [x, -]. */
4060 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4061 switch (TREE_CODE (TREE_TYPE (high1)))
4062 {
4063 case ENUMERAL_TYPE:
4064 if (TYPE_PRECISION (TREE_TYPE (high1))
4065 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4066 break;
4067 /* FALLTHROUGH */
4068 case INTEGER_TYPE:
4069 case CHAR_TYPE:
4070 if (tree_int_cst_equal (high1,
4071 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4072 high1 = 0;
4073 break;
4074 case POINTER_TYPE:
4075 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4076 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4077 high1, 1,
4078 integer_one_node, 1)))
4079 high1 = 0;
4080 break;
4081 default:
4082 break;
4083 }
4084
4085 /* The ranges might also be adjacent between the maximum and
4086 minimum values of the given type. For
4087 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4088 return + [x + 1, y - 1]. */
4089 if (low0 == 0 && high1 == 0)
4090 {
4091 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4092 integer_one_node, 1);
4093 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4094 integer_one_node, 0);
4095 if (low == 0 || high == 0)
4096 return 0;
4097
4098 in_p = 1;
4099 }
4100 else
4101 return 0;
4102 }
4103 }
4104 else if (subset)
4105 in_p = 0, low = low0, high = high0;
4106 else
4107 in_p = 0, low = low0, high = high1;
4108 }
4109
4110 *pin_p = in_p, *plow = low, *phigh = high;
4111 return 1;
4112 }
4113 \f
4114
4115 /* Subroutine of fold, looking inside expressions of the form
4116 A op B ? A : C, where ARG0 is A op B and ARG2 is C. This
4117 function is also used to optimize A op B ? C : A, by
4118 reversing the comparison first.
4119
4120 Return a folded expression whose code is not a COND_EXPR
4121 anymore, or NULL_TREE if no folding opportunity is found. */
4122
4123 static tree
4124 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg2)
4125 {
4126 enum tree_code comp_code = TREE_CODE (arg0);
4127 tree arg00 = TREE_OPERAND (arg0, 0);
4128 tree arg01 = TREE_OPERAND (arg0, 1);
4129 tree tem;
4130 STRIP_NOPS (arg2);
4131
4132 /* If we have A op 0 ? A : -A, consider applying the following
4133 transformations:
4134
4135 A == 0? A : -A same as -A
4136 A != 0? A : -A same as A
4137 A >= 0? A : -A same as abs (A)
4138 A > 0? A : -A same as abs (A)
4139 A <= 0? A : -A same as -abs (A)
4140 A < 0? A : -A same as -abs (A)
4141
4142 None of these transformations work for modes with signed
4143 zeros. If A is +/-0, the first two transformations will
4144 change the sign of the result (from +0 to -0, or vice
4145 versa). The last four will fix the sign of the result,
4146 even though the original expressions could be positive or
4147 negative, depending on the sign of A.
4148
4149 Note that all these transformations are correct if A is
4150 NaN, since the two alternatives (A and -A) are also NaNs. */
4151 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4152 ? real_zerop (arg01)
4153 : integer_zerop (arg01))
4154 && TREE_CODE (arg2) == NEGATE_EXPR
4155 && operand_equal_p (TREE_OPERAND (arg2, 0), arg00, 0))
4156 switch (comp_code)
4157 {
4158 case EQ_EXPR:
4159 return fold_convert (type, negate_expr (arg00));
4160 case NE_EXPR:
4161 return pedantic_non_lvalue (fold_convert (type, arg00));
4162 case GE_EXPR:
4163 case GT_EXPR:
4164 if (TYPE_UNSIGNED (TREE_TYPE (arg00)))
4165 arg00 = fold_convert (lang_hooks.types.signed_type
4166 (TREE_TYPE (arg00)), arg00);
4167 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg00), arg00));
4168 return pedantic_non_lvalue (fold_convert (type, tem));
4169 case LE_EXPR:
4170 case LT_EXPR:
4171 if (TYPE_UNSIGNED (TREE_TYPE (arg00)))
4172 arg00 = fold_convert (lang_hooks.types.signed_type
4173 (TREE_TYPE (arg00)), arg00);
4174 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg00), arg00));
4175 return negate_expr (fold_convert (type, tem));
4176 default:
4177 abort ();
4178 }
4179
4180 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4181 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4182 both transformations are correct when A is NaN: A != 0
4183 is then true, and A == 0 is false. */
4184
4185 if (integer_zerop (arg01) && integer_zerop (arg2))
4186 {
4187 if (comp_code == NE_EXPR)
4188 return pedantic_non_lvalue (fold_convert (type, arg00));
4189 else if (comp_code == EQ_EXPR)
4190 return pedantic_non_lvalue (fold_convert (type, integer_zero_node));
4191 }
4192
4193 /* Try some transformations of A op B ? A : B.
4194
4195 A == B? A : B same as B
4196 A != B? A : B same as A
4197 A >= B? A : B same as max (A, B)
4198 A > B? A : B same as max (B, A)
4199 A <= B? A : B same as min (A, B)
4200 A < B? A : B same as min (B, A)
4201
4202 As above, these transformations don't work in the presence
4203 of signed zeros. For example, if A and B are zeros of
4204 opposite sign, the first two transformations will change
4205 the sign of the result. In the last four, the original
4206 expressions give different results for (A=+0, B=-0) and
4207 (A=-0, B=+0), but the transformed expressions do not.
4208
4209 The first two transformations are correct if either A or B
4210 is a NaN. In the first transformation, the condition will
4211 be false, and B will indeed be chosen. In the case of the
4212 second transformation, the condition A != B will be true,
4213 and A will be chosen.
4214
4215 The conversions to max() and min() are not correct if B is
4216 a number and A is not. The conditions in the original
4217 expressions will be false, so all four give B. The min()
4218 and max() versions would give a NaN instead. */
4219 if (operand_equal_for_comparison_p (arg01, arg2, arg00))
4220 {
4221 tree comp_op0 = arg00;
4222 tree comp_op1 = arg01;
4223 tree comp_type = TREE_TYPE (comp_op0);
4224
4225 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4226 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4227 {
4228 comp_type = type;
4229 comp_op0 = arg00;
4230 comp_op1 = arg2;
4231 }
4232
4233 switch (comp_code)
4234 {
4235 case EQ_EXPR:
4236 return pedantic_non_lvalue (fold_convert (type, arg2));
4237 case NE_EXPR:
4238 return pedantic_non_lvalue (fold_convert (type, arg00));
4239 case LE_EXPR:
4240 case LT_EXPR:
4241 /* In C++ a ?: expression can be an lvalue, so put the
4242 operand which will be used if they are equal first
4243 so that we can convert this back to the
4244 corresponding COND_EXPR. */
4245 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00))))
4246 return pedantic_non_lvalue (
4247 fold_convert (type, fold (build2 (MIN_EXPR, comp_type,
4248 (comp_code == LE_EXPR
4249 ? comp_op0 : comp_op1),
4250 (comp_code == LE_EXPR
4251 ? comp_op1 : comp_op0)))));
4252 break;
4253 case GE_EXPR:
4254 case GT_EXPR:
4255 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00))))
4256 return pedantic_non_lvalue (
4257 fold_convert (type, fold (build2 (MAX_EXPR, comp_type,
4258 (comp_code == GE_EXPR
4259 ? comp_op0 : comp_op1),
4260 (comp_code == GE_EXPR
4261 ? comp_op1 : comp_op0)))));
4262 break;
4263 default:
4264 abort ();
4265 }
4266 }
4267
4268 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4269 we might still be able to simplify this. For example,
4270 if C1 is one less or one more than C2, this might have started
4271 out as a MIN or MAX and been transformed by this function.
4272 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4273
4274 if (INTEGRAL_TYPE_P (type)
4275 && TREE_CODE (arg01) == INTEGER_CST
4276 && TREE_CODE (arg2) == INTEGER_CST)
4277 switch (comp_code)
4278 {
4279 case EQ_EXPR:
4280 /* We can replace A with C1 in this case. */
4281 arg00 = fold_convert (type, arg01);
4282 return fold (build3 (COND_EXPR, type, arg0, arg00, arg2));
4283
4284 case LT_EXPR:
4285 /* If C1 is C2 + 1, this is min(A, C2). */
4286 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4287 OEP_ONLY_CONST)
4288 && operand_equal_p (arg01,
4289 const_binop (PLUS_EXPR, arg2,
4290 integer_one_node, 0),
4291 OEP_ONLY_CONST))
4292 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4293 type, arg00, arg2)));
4294 break;
4295
4296 case LE_EXPR:
4297 /* If C1 is C2 - 1, this is min(A, C2). */
4298 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4299 OEP_ONLY_CONST)
4300 && operand_equal_p (arg01,
4301 const_binop (MINUS_EXPR, arg2,
4302 integer_one_node, 0),
4303 OEP_ONLY_CONST))
4304 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4305 type, arg00, arg2)));
4306 break;
4307
4308 case GT_EXPR:
4309 /* If C1 is C2 - 1, this is max(A, C2). */
4310 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4311 OEP_ONLY_CONST)
4312 && operand_equal_p (arg01,
4313 const_binop (MINUS_EXPR, arg2,
4314 integer_one_node, 0),
4315 OEP_ONLY_CONST))
4316 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4317 type, arg00, arg2)));
4318 break;
4319
4320 case GE_EXPR:
4321 /* If C1 is C2 + 1, this is max(A, C2). */
4322 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4323 OEP_ONLY_CONST)
4324 && operand_equal_p (arg01,
4325 const_binop (PLUS_EXPR, arg2,
4326 integer_one_node, 0),
4327 OEP_ONLY_CONST))
4328 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4329 type, arg00, arg2)));
4330 break;
4331 case NE_EXPR:
4332 break;
4333 default:
4334 abort ();
4335 }
4336
4337 return NULL_TREE;
4338 }
4339
4341 \f
4342 #ifndef RANGE_TEST_NON_SHORT_CIRCUIT
4343 #define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4344 #endif
4345
4346 /* EXP is some logical combination of boolean tests. See if we can
4347 merge it into some range test. Return the new tree if so. */
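/* A typical instance (assuming 8-bit chars): for ch >= '0' && ch <= '9'
   the operands parse to + ['0', -] and + [-, '9'], which merge to
   + ['0', '9']; build_range_check then emits the single test
   (unsigned char) (ch - '0') <= 9. */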
4348
4349 static tree
4350 fold_range_test (tree exp)
4351 {
4352 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
4353 || TREE_CODE (exp) == TRUTH_OR_EXPR);
4354 int in0_p, in1_p, in_p;
4355 tree low0, low1, low, high0, high1, high;
4356 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
4357 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
4358 tree tem;
4359
4360 /* If this is an OR operation, invert both sides; we will invert
4361 again at the end. */
4362 if (or_op)
4363 in0_p = ! in0_p, in1_p = ! in1_p;
4364
4365 /* If both expressions are the same, if we can merge the ranges, and we
4366 can build the range test, return it or it inverted. If one of the
4367 ranges is always true or always false, consider it to be the same
4368 expression as the other. */
4369 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4370 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4371 in1_p, low1, high1)
4372 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
4373 lhs != 0 ? lhs
4374 : rhs != 0 ? rhs : integer_zero_node,
4375 in_p, low, high))))
4376 return or_op ? invert_truthvalue (tem) : tem;
4377
4378 /* On machines where branches are expensive, if this is a
4379 short-circuited branch and the underlying object on both sides
4380 is the same, make a non-short-circuit operation. */
4381 else if (RANGE_TEST_NON_SHORT_CIRCUIT
4382 && lhs != 0 && rhs != 0
4383 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4384 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
4385 && operand_equal_p (lhs, rhs, 0))
4386 {
4387 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4388 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4389 which cases we can't do this. */
4390 if (simple_operand_p (lhs))
4391 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4392 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4393 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
4394 TREE_OPERAND (exp, 1));
4395
4396 else if (lang_hooks.decls.global_bindings_p () == 0
4397 && ! CONTAINS_PLACEHOLDER_P (lhs))
4398 {
4399 tree common = save_expr (lhs);
4400
4401 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
4402 or_op ? ! in0_p : in0_p,
4403 low0, high0))
4404 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
4405 or_op ? ! in1_p : in1_p,
4406 low1, high1))))
4407 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4408 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4409 TREE_TYPE (exp), lhs, rhs);
4410 }
4411 }
4412
4413 return 0;
4414 }
4415 \f
4416 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4417 bit value. Arrange things so the extra bits will be set to zero if and
4418 only if C is sign-extended to its full width. If MASK is nonzero,
4419 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4420
4421 static tree
4422 unextend (tree c, int p, int unsignedp, tree mask)
4423 {
4424 tree type = TREE_TYPE (c);
4425 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4426 tree temp;
4427
4428 if (p == modesize || unsignedp)
4429 return c;
4430
4431 /* We work by getting just the sign bit into the low-order bit, then
4432 into the high-order bit, then sign-extend. We then XOR that value
4433 with C. */
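/* Worked example (illustrative numbers): for p == 4, modesize == 32 and
   c == 0b1010, temp becomes 1, is shifted up to the sign bit and
   arithmetically back down to 0xfffffff0; XORing that with c yields
   0xfffffffa, the sign-extension of the 4-bit value 0b1010. */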
4434 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4435 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4436
4437 /* We must use a signed type in order to get an arithmetic right shift.
4438 However, we must also avoid introducing accidental overflows, so that
4439 a subsequent call to integer_zerop will work. Hence we must
4440 do the type conversion here. At this point, the constant is either
4441 zero or one, and the conversion to a signed type can never overflow.
4442 We could get an overflow if this conversion is done anywhere else. */
4443 if (TYPE_UNSIGNED (type))
4444 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4445
4446 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4447 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4448 if (mask != 0)
4449 temp = const_binop (BIT_AND_EXPR, temp,
4450 fold_convert (TREE_TYPE (c), mask), 0);
4451 /* If necessary, convert the type back to match the type of C. */
4452 if (TYPE_UNSIGNED (type))
4453 temp = fold_convert (type, temp);
4454
4455 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4456 }
4457 \f
4458 /* Find ways of folding logical expressions of LHS and RHS:
4459 Try to merge two comparisons to the same innermost item.
4460 Look for range tests like "ch >= '0' && ch <= '9'".
4461 Look for combinations of simple terms on machines with expensive branches
4462 and evaluate the RHS unconditionally.
4463
4464 For example, if we have p->a == 2 && p->b == 4 and we can make an
4465 object large enough to span both A and B, we can do this with a comparison
4466 against the object ANDed with a mask.
4467
4468 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4469 operations to do this with one comparison.
4470
4471 We check for both normal comparisons and the BIT_AND_EXPRs made by
4472 this function and the one above.
4473
4474 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4475 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4476
4477 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4478 two operands.
4479
4480 We return the simplified tree or 0 if no optimization is possible. */
4481
4482 static tree
4483 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4484 {
4485 /* If this is the "or" of two comparisons, we can do something if
4486 the comparisons are NE_EXPR. If this is the "and", we can do something
4487 if the comparisons are EQ_EXPR. I.e.,
4488 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4489
4490 WANTED_CODE is this operation code. For single bit fields, we can
4491 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4492 comparison for one-bit fields. */
4493
4494 enum tree_code wanted_code;
4495 enum tree_code lcode, rcode;
4496 tree ll_arg, lr_arg, rl_arg, rr_arg;
4497 tree ll_inner, lr_inner, rl_inner, rr_inner;
4498 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4499 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4500 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4501 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4502 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4503 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4504 enum machine_mode lnmode, rnmode;
4505 tree ll_mask, lr_mask, rl_mask, rr_mask;
4506 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4507 tree l_const, r_const;
4508 tree lntype, rntype, result;
4509 int first_bit, end_bit;
4510 int volatilep;
4511
4512 /* Start by getting the comparison codes. Fail if anything is volatile.
4513 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4514 it were surrounded with a NE_EXPR. */
4515
4516 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4517 return 0;
4518
4519 lcode = TREE_CODE (lhs);
4520 rcode = TREE_CODE (rhs);
4521
4522 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4523 {
4524 lhs = build2 (NE_EXPR, truth_type, lhs, integer_zero_node);
4525 lcode = NE_EXPR;
4526 }
4527
4528 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4529 {
4530 rhs = build2 (NE_EXPR, truth_type, rhs, integer_zero_node);
4531 rcode = NE_EXPR;
4532 }
4533
4534 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
4535 return 0;
4536
4537 ll_arg = TREE_OPERAND (lhs, 0);
4538 lr_arg = TREE_OPERAND (lhs, 1);
4539 rl_arg = TREE_OPERAND (rhs, 0);
4540 rr_arg = TREE_OPERAND (rhs, 1);
4541
4542 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4543 if (simple_operand_p (ll_arg)
4544 && simple_operand_p (lr_arg))
4545 {
4546 tree result;
4547 if (operand_equal_p (ll_arg, rl_arg, 0)
4548 && operand_equal_p (lr_arg, rr_arg, 0))
4549 {
4550 result = combine_comparisons (code, lcode, rcode,
4551 truth_type, ll_arg, lr_arg);
4552 if (result)
4553 return result;
4554 }
4555 else if (operand_equal_p (ll_arg, rr_arg, 0)
4556 && operand_equal_p (lr_arg, rl_arg, 0))
4557 {
4558 result = combine_comparisons (code, lcode,
4559 swap_tree_comparison (rcode),
4560 truth_type, ll_arg, lr_arg);
4561 if (result)
4562 return result;
4563 }
4564 }
4565
4566 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4567 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4568
4569 /* If the RHS can be evaluated unconditionally and its operands are
4570 simple, it wins to evaluate the RHS unconditionally on machines
4571 with expensive branches. In this case, this isn't a comparison
4572 that can be merged. Avoid doing this if the RHS is a floating-point
4573 comparison since those can trap. */
4574
4575 if (BRANCH_COST >= 2
4576 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4577 && simple_operand_p (rl_arg)
4578 && simple_operand_p (rr_arg))
4579 {
4580 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4581 if (code == TRUTH_OR_EXPR
4582 && lcode == NE_EXPR && integer_zerop (lr_arg)
4583 && rcode == NE_EXPR && integer_zerop (rr_arg)
4584 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4585 return build2 (NE_EXPR, truth_type,
4586 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4587 ll_arg, rl_arg),
4588 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4589
4590 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4591 if (code == TRUTH_AND_EXPR
4592 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4593 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4594 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4595 return build2 (EQ_EXPR, truth_type,
4596 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4597 ll_arg, rl_arg),
4598 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4599
4600 return build2 (code, truth_type, lhs, rhs);
4601 }
4602
4603 /* See if the comparisons can be merged. Then get all the parameters for
4604 each side. */
4605
4606 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4607 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4608 return 0;
4609
4610 volatilep = 0;
4611 ll_inner = decode_field_reference (ll_arg,
4612 &ll_bitsize, &ll_bitpos, &ll_mode,
4613 &ll_unsignedp, &volatilep, &ll_mask,
4614 &ll_and_mask);
4615 lr_inner = decode_field_reference (lr_arg,
4616 &lr_bitsize, &lr_bitpos, &lr_mode,
4617 &lr_unsignedp, &volatilep, &lr_mask,
4618 &lr_and_mask);
4619 rl_inner = decode_field_reference (rl_arg,
4620 &rl_bitsize, &rl_bitpos, &rl_mode,
4621 &rl_unsignedp, &volatilep, &rl_mask,
4622 &rl_and_mask);
4623 rr_inner = decode_field_reference (rr_arg,
4624 &rr_bitsize, &rr_bitpos, &rr_mode,
4625 &rr_unsignedp, &volatilep, &rr_mask,
4626 &rr_and_mask);
4627
4628 /* The inner operation on the lhs of each comparison must be the same
4629 if we are to be able to do anything. Then see if we have constants.
4630 If not, the same must be true for the rhs's. */
4632 if (volatilep || ll_inner == 0 || rl_inner == 0
4633 || ! operand_equal_p (ll_inner, rl_inner, 0))
4634 return 0;
4635
4636 if (TREE_CODE (lr_arg) == INTEGER_CST
4637 && TREE_CODE (rr_arg) == INTEGER_CST)
4638 l_const = lr_arg, r_const = rr_arg;
4639 else if (lr_inner == 0 || rr_inner == 0
4640 || ! operand_equal_p (lr_inner, rr_inner, 0))
4641 return 0;
4642 else
4643 l_const = r_const = 0;
4644
4645 /* If either comparison code is not correct for our logical operation,
4646 fail. However, we can convert a one-bit comparison against zero into
4647 the opposite comparison against that bit being set in the field. */
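/* For example (hypothetical one-bit field b): under "and" we want
   EQ_EXPR on both sides, so a left-hand test like b != 0 does not
   match; the code below converts it to the equivalent b == MASK,
   where MASK has just that single bit set. */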
4648
4649 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4650 if (lcode != wanted_code)
4651 {
4652 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4653 {
4654 /* Make the left operand unsigned, since we are only interested
4655 in the value of one bit. Otherwise we are doing the wrong
4656 thing below. */
4657 ll_unsignedp = 1;
4658 l_const = ll_mask;
4659 }
4660 else
4661 return 0;
4662 }
4663
4664 /* This is analogous to the code for l_const above. */
4665 if (rcode != wanted_code)
4666 {
4667 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4668 {
4669 rl_unsignedp = 1;
4670 r_const = rl_mask;
4671 }
4672 else
4673 return 0;
4674 }
4675
4676 /* After this point all optimizations will generate bit-field
4677 references, which we might not want. */
4678 if (! lang_hooks.can_use_bit_fields_p ())
4679 return 0;
4680
4681 /* See if we can find a mode that contains both fields being compared on
4682 the left. If we can't, fail. Otherwise, update all constants and masks
4683 to be relative to a field of that size. */
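/* Sketch (made-up field layout): if the lhs fields occupy bits [0, 8)
   and [8, 16) of the same structure, get_best_mode may return HImode;
   lnbitsize is then 16 and all masks and constants are shifted into
   position so a single 16-bit access can test both fields. */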
4684 first_bit = MIN (ll_bitpos, rl_bitpos);
4685 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4686 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4687 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4688 volatilep);
4689 if (lnmode == VOIDmode)
4690 return 0;
4691
4692 lnbitsize = GET_MODE_BITSIZE (lnmode);
4693 lnbitpos = first_bit & ~ (lnbitsize - 1);
4694 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4695 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4696
4697 if (BYTES_BIG_ENDIAN)
4698 {
4699 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4700 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4701 }
4702
4703 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4704 size_int (xll_bitpos), 0);
4705 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4706 size_int (xrl_bitpos), 0);
4707
4708 if (l_const)
4709 {
4710 l_const = fold_convert (lntype, l_const);
4711 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4712 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4713 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4714 fold (build1 (BIT_NOT_EXPR,
4715 lntype, ll_mask)),
4716 0)))
4717 {
4718 warning ("comparison is always %d", wanted_code == NE_EXPR);
4719
4720 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4721 }
4722 }
4723 if (r_const)
4724 {
4725 r_const = fold_convert (lntype, r_const);
4726 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4727 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4728 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4729 fold (build1 (BIT_NOT_EXPR,
4730 lntype, rl_mask)),
4731 0)))
4732 {
4733 warning ("comparison is always %d", wanted_code == NE_EXPR);
4734
4735 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4736 }
4737 }
4738
4739 /* If the right sides are not constant, do the same for them. Also,
4740 disallow this optimization if a size or signedness mismatch occurs
4741 between the left and right sides. */
4742 if (l_const == 0)
4743 {
4744 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4745 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4746 /* Make sure the two fields on the right
4747 correspond to the left without being swapped. */
4748 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4749 return 0;
4750
4751 first_bit = MIN (lr_bitpos, rr_bitpos);
4752 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4753 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4754 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4755 volatilep);
4756 if (rnmode == VOIDmode)
4757 return 0;
4758
4759 rnbitsize = GET_MODE_BITSIZE (rnmode);
4760 rnbitpos = first_bit & ~ (rnbitsize - 1);
4761 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4762 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4763
4764 if (BYTES_BIG_ENDIAN)
4765 {
4766 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4767 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4768 }
4769
4770 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4771 size_int (xlr_bitpos), 0);
4772 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4773 size_int (xrr_bitpos), 0);
4774
4775 /* Make a mask that corresponds to both fields being compared.
4776 Do this for both items being compared. If the operands are the
4777 same size and the bits being compared are in the same position
4778 then we can do this by masking both and comparing the masked
4779 results. */
4780 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4781 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4782 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4783 {
4784 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4785 ll_unsignedp || rl_unsignedp);
4786 if (! all_ones_mask_p (ll_mask, lnbitsize))
4787 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
4788
4789 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4790 lr_unsignedp || rr_unsignedp);
4791 if (! all_ones_mask_p (lr_mask, rnbitsize))
4792 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
4793
4794 return build2 (wanted_code, truth_type, lhs, rhs);
4795 }
4796
4797 /* There is still another way we can do something: If both pairs of
4798 fields being compared are adjacent, we may be able to make a wider
4799 field containing them both.
4800
4801 Note that we still must mask the lhs/rhs expressions. Furthermore,
4802 the mask must be shifted to account for the shift done by
4803 make_bit_field_ref. */
4804 if ((ll_bitsize + ll_bitpos == rl_bitpos
4805 && lr_bitsize + lr_bitpos == rr_bitpos)
4806 || (ll_bitpos == rl_bitpos + rl_bitsize
4807 && lr_bitpos == rr_bitpos + rr_bitsize))
4808 {
4809 tree type;
4810
4811 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4812 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4813 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4814 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4815
4816 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4817 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4818 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4819 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4820
4821 /* Convert to the smaller type before masking out unwanted bits. */
4822 type = lntype;
4823 if (lntype != rntype)
4824 {
4825 if (lnbitsize > rnbitsize)
4826 {
4827 lhs = fold_convert (rntype, lhs);
4828 ll_mask = fold_convert (rntype, ll_mask);
4829 type = rntype;
4830 }
4831 else if (lnbitsize < rnbitsize)
4832 {
4833 rhs = fold_convert (lntype, rhs);
4834 lr_mask = fold_convert (lntype, lr_mask);
4835 type = lntype;
4836 }
4837 }
4838
4839 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4840 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
4841
4842 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4843 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
4844
4845 return build2 (wanted_code, truth_type, lhs, rhs);
4846 }
4847
4848 return 0;
4849 }
4850
4851 /* Handle the case of comparisons with constants. If there is something in
4852 common between the masks, those bits of the constants must be the same.
4853 If not, the condition is always false. Test for this to avoid generating
4854 incorrect code below. */
4855 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4856 if (! integer_zerop (result)
4857 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4858 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4859 {
4860 if (wanted_code == NE_EXPR)
4861 {
4862 warning ("`or' of unmatched not-equal tests is always 1");
4863 return constant_boolean_node (true, truth_type);
4864 }
4865 else
4866 {
4867 warning ("`and' of mutually exclusive equal-tests is always 0");
4868 return constant_boolean_node (false, truth_type);
4869 }
4870 }
4871
4872 /* Construct the expression we will return. First get the component
4873 reference we will make. Unless the mask is all ones the width of
4874 that field, perform the mask operation. Then compare with the
4875 merged constant. */
4876 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4877 ll_unsignedp || rl_unsignedp);
4878
4879 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4880 if (! all_ones_mask_p (ll_mask, lnbitsize))
4881 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
4882
4883 return build2 (wanted_code, truth_type, result,
4884 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4885 }
4886 \f
4887 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4888 constant. */
4889
4890 static tree
4891 optimize_minmax_comparison (tree t)
4892 {
4893 tree type = TREE_TYPE (t);
4894 tree arg0 = TREE_OPERAND (t, 0);
4895 enum tree_code op_code;
4896 tree comp_const = TREE_OPERAND (t, 1);
4897 tree minmax_const;
4898 int consts_equal, consts_lt;
4899 tree inner;
4900
4901 STRIP_SIGN_NOPS (arg0);
4902
4903 op_code = TREE_CODE (arg0);
4904 minmax_const = TREE_OPERAND (arg0, 1);
4905 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4906 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4907 inner = TREE_OPERAND (arg0, 0);
4908
4909 /* If something does not permit us to optimize, return the original tree. */
4910 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4911 || TREE_CODE (comp_const) != INTEGER_CST
4912 || TREE_CONSTANT_OVERFLOW (comp_const)
4913 || TREE_CODE (minmax_const) != INTEGER_CST
4914 || TREE_CONSTANT_OVERFLOW (minmax_const))
4915 return t;
4916
4917 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4918 and GT_EXPR, doing the rest with recursive calls using logical
4919 simplifications. */
4920 switch (TREE_CODE (t))
4921 {
4922 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4923 return
4924 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4925
4926 case GE_EXPR:
4927 return
4928 fold (build2 (TRUTH_ORIF_EXPR, type,
4929 optimize_minmax_comparison
4930 (build2 (EQ_EXPR, type, arg0, comp_const)),
4931 optimize_minmax_comparison
4932 (build2 (GT_EXPR, type, arg0, comp_const))));
4933
4934 case EQ_EXPR:
4935 if (op_code == MAX_EXPR && consts_equal)
4936 /* MAX (X, 0) == 0 -> X <= 0 */
4937 return fold (build2 (LE_EXPR, type, inner, comp_const));
4938
4939 else if (op_code == MAX_EXPR && consts_lt)
4940 /* MAX (X, 0) == 5 -> X == 5 */
4941 return fold (build2 (EQ_EXPR, type, inner, comp_const));
4942
4943 else if (op_code == MAX_EXPR)
4944 /* MAX (X, 0) == -1 -> false */
4945 return omit_one_operand (type, integer_zero_node, inner);
4946
4947 else if (consts_equal)
4948 /* MIN (X, 0) == 0 -> X >= 0 */
4949 return fold (build2 (GE_EXPR, type, inner, comp_const));
4950
4951 else if (consts_lt)
4952 /* MIN (X, 0) == 5 -> false */
4953 return omit_one_operand (type, integer_zero_node, inner);
4954
4955 else
4956 /* MIN (X, 0) == -1 -> X == -1 */
4957 return fold (build2 (EQ_EXPR, type, inner, comp_const));
4958
4959 case GT_EXPR:
4960 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4961 /* MAX (X, 0) > 0 -> X > 0
4962 MAX (X, 0) > 5 -> X > 5 */
4963 return fold (build2 (GT_EXPR, type, inner, comp_const));
4964
4965 else if (op_code == MAX_EXPR)
4966 /* MAX (X, 0) > -1 -> true */
4967 return omit_one_operand (type, integer_one_node, inner);
4968
4969 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4970 /* MIN (X, 0) > 0 -> false
4971 MIN (X, 0) > 5 -> false */
4972 return omit_one_operand (type, integer_zero_node, inner);
4973
4974 else
4975 /* MIN (X, 0) > -1 -> X > -1 */
4976 return fold (build2 (GT_EXPR, type, inner, comp_const));
4977
4978 default:
4979 return t;
4980 }
4981 }
4982 \f
4983 /* T is an integer expression that is being multiplied, divided, or taken a
4984 modulus (CODE says which and what kind of divide or modulus) by a
4985 constant C. See if we can eliminate that operation by folding it with
4986 other operations already in T. WIDE_TYPE, if non-null, is a type that
4987 should be used for the computation if wider than our type.
4988
4989 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4990 (X * 2) + (Y * 4). We must, however, be assured that either the original
4991 expression would not overflow or that overflow is undefined for the type
4992 in the language in question.
4993
4994 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4995 the machine has a multiply-accumulate insn or that this is part of an
4996 addressing calculation.
4997
4998 If we return a non-null expression, it is an equivalent form of the
4999 original computation, but need not be in the original type. */
5000
5001 static tree
5002 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5003 {
5004 /* To avoid exponential search depth, refuse to allow recursion past
5005 three levels. Beyond that (1) it's highly unlikely that we'll find
5006 something interesting and (2) we've probably processed it before
5007 when we built the inner expression. */
5008
5009 static int depth;
5010 tree ret;
5011
5012 if (depth > 3)
5013 return NULL;
5014
5015 depth++;
5016 ret = extract_muldiv_1 (t, c, code, wide_type);
5017 depth--;
5018
5019 return ret;
5020 }
5021
5022 static tree
5023 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5024 {
5025 tree type = TREE_TYPE (t);
5026 enum tree_code tcode = TREE_CODE (t);
5027 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5028 > GET_MODE_SIZE (TYPE_MODE (type)))
5029 ? wide_type : type);
5030 tree t1, t2;
5031 int same_p = tcode == code;
5032 tree op0 = NULL_TREE, op1 = NULL_TREE;
5033
5034 /* Don't deal with constants of zero here; they confuse the code below. */
5035 if (integer_zerop (c))
5036 return NULL_TREE;
5037
5038 if (TREE_CODE_CLASS (tcode) == '1')
5039 op0 = TREE_OPERAND (t, 0);
5040
5041 if (TREE_CODE_CLASS (tcode) == '2')
5042 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5043
5044 /* Note that we need not handle conditional operations here since fold
5045 already handles those cases. So just do arithmetic here. */
5046 switch (tcode)
5047 {
5048 case INTEGER_CST:
5049 /* For a constant, we can always simplify if we are a multiply
5050 or (for divide and modulus) if it is a multiple of our constant. */
5051 if (code == MULT_EXPR
5052 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5053 return const_binop (code, fold_convert (ctype, t),
5054 fold_convert (ctype, c), 0);
5055 break;
5056
5057 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5058 /* If op0 is an expression ... */
5059 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
5060 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
5061 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
5062 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
5063 /* ... and is unsigned, and its type is smaller than ctype,
5064 then we cannot pass through this widening. */
5065 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5066 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5067 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5068 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5069 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5070 /* ... or its type is larger than ctype,
5071 then we cannot pass through this truncation. */
5072 || (GET_MODE_SIZE (TYPE_MODE (ctype))
5073 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5074 /* ... or signedness changes for division or modulus,
5075 then we cannot pass through this conversion. */
5076 || (code != MULT_EXPR
5077 && (TYPE_UNSIGNED (ctype)
5078 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5079 break;
5080
5081 /* Pass the constant down and see if we can make a simplification. If
5082 we can, replace this expression with the inner simplification for
5083 possible later conversion to our or some other type. */
5084 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5085 && TREE_CODE (t2) == INTEGER_CST
5086 && ! TREE_CONSTANT_OVERFLOW (t2)
5087 && (0 != (t1 = extract_muldiv (op0, t2, code,
5088 code == MULT_EXPR
5089 ? ctype : NULL_TREE))))
5090 return t1;
5091 break;
5092
5093 case NEGATE_EXPR: case ABS_EXPR:
5094 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5095 return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
5096 break;
5097
5098 case MIN_EXPR: case MAX_EXPR:
5099 /* If widening the type changes the signedness, then we can't perform
5100 this optimization as that changes the result. */
5101 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5102 break;
5103
5104 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5105 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5106 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5107 {
5108 if (tree_int_cst_sgn (c) < 0)
5109 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5110
5111 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5112 fold_convert (ctype, t2)));
5113 }
5114 break;
5115
5116 case LSHIFT_EXPR: case RSHIFT_EXPR:
5117 /* If the second operand is constant, this is a multiplication
5118 or floor division by a power of two, so we can treat it that
5119 way unless the multiplier or divisor overflows. */
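/* E.g. (illustration): X << 3 is rewritten below as X * 8, and X >> 2
   as the floor division X / 4, after which the multiply/divide logic
   of this function applies. */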
5120 if (TREE_CODE (op1) == INTEGER_CST
5121 /* const_binop may not detect overflow correctly,
5122 so check for it explicitly here. */
5123 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5124 && TREE_INT_CST_HIGH (op1) == 0
5125 && 0 != (t1 = fold_convert (ctype,
5126 const_binop (LSHIFT_EXPR,
5127 size_one_node,
5128 op1, 0)))
5129 && ! TREE_OVERFLOW (t1))
5130 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5131 ? MULT_EXPR : FLOOR_DIV_EXPR,
5132 ctype, fold_convert (ctype, op0), t1),
5133 c, code, wide_type);
5134 break;
5135
5136 case PLUS_EXPR: case MINUS_EXPR:
5137 /* See if we can eliminate the operation on both sides. If we can, we
5138 can return a new PLUS or MINUS. If we can't, the only remaining
5139 cases where we can do anything are if the second operand is a
5140 constant. */
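/* Illustration (hypothetical operands): dividing X * 4 + Y * 8 by 4
   eliminates the constant on both sides, giving X + Y * 2, since both
   addends are multiples of 4. */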
5141 t1 = extract_muldiv (op0, c, code, wide_type);
5142 t2 = extract_muldiv (op1, c, code, wide_type);
5143 if (t1 != 0 && t2 != 0
5144 && (code == MULT_EXPR
5145 /* If not multiplication, we can only do this if both operands
5146 are divisible by c. */
5147 || (multiple_of_p (ctype, op0, c)
5148 && multiple_of_p (ctype, op1, c))))
5149 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5150 fold_convert (ctype, t2)));
5151
5152 /* If this was a subtraction, negate OP1 and set it to be an addition.
5153 This simplifies the logic below. */
5154 if (tcode == MINUS_EXPR)
5155 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5156
5157 if (TREE_CODE (op1) != INTEGER_CST)
5158 break;
5159
5160 /* If either OP1 or C is negative, this optimization is not safe for
5161 some of the division and remainder types while for others we need
5162 to change the code. */
5163 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5164 {
5165 if (code == CEIL_DIV_EXPR)
5166 code = FLOOR_DIV_EXPR;
5167 else if (code == FLOOR_DIV_EXPR)
5168 code = CEIL_DIV_EXPR;
5169 else if (code != MULT_EXPR
5170 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5171 break;
5172 }
5173
5174 /* If it's a multiply or a division/modulus operation of a multiple
5175 of our constant, do the operation and verify it doesn't overflow. */
5176 if (code == MULT_EXPR
5177 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5178 {
5179 op1 = const_binop (code, fold_convert (ctype, op1),
5180 fold_convert (ctype, c), 0);
5181 /* We allow the constant to overflow with wrapping semantics. */
5182 if (op1 == 0
5183 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5184 break;
5185 }
5186 else
5187 break;
5188
5189 /* If we have an unsigned type that is not a sizetype, we cannot widen
5190 the operation since it will change the result if the original
5191 computation overflowed. */
5192 if (TYPE_UNSIGNED (ctype)
5193 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5194 && ctype != type)
5195 break;
5196
5197 /* If we were able to eliminate our operation from the first side,
5198 apply our operation to the second side and reform the PLUS. */
5199 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5200 return fold (build2 (tcode, ctype, fold_convert (ctype, t1), op1));
5201
5202 /* The last case is when we have a multiply. In that case, we can
5203 apply the distributive law to commute the multiply and addition
5204 if the multiplication of the constants doesn't overflow. */
5205 if (code == MULT_EXPR)
5206 return fold (build2 (tcode, ctype,
5207 fold (build2 (code, ctype,
5208 fold_convert (ctype, op0),
5209 fold_convert (ctype, c))),
5210 op1));
5211
5212 break;
5213
5214 case MULT_EXPR:
5215 /* We have a special case here if we are doing something like
5216 (C * 8) % 4 since we know that's zero. */
5217 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5218 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5219 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5220 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5221 return omit_one_operand (type, integer_zero_node, op0);
5222
5223 /* ... fall through ... */
5224
5225 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5226 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5227 /* If we can extract our operation from the LHS, do so and return a
5228 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5229 do something only if the second operand is a constant. */
5230 if (same_p
5231 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5232 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5233 fold_convert (ctype, op1)));
5234 else if (tcode == MULT_EXPR && code == MULT_EXPR
5235 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5236 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5237 fold_convert (ctype, t1)));
5238 else if (TREE_CODE (op1) != INTEGER_CST)
5239 return 0;
5240
5241 /* If these are the same operation types, we can associate them
5242 assuming no overflow. */
5243 if (tcode == code
5244 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5245 fold_convert (ctype, c), 0))
5246 && ! TREE_OVERFLOW (t1))
5247 return fold (build2 (tcode, ctype, fold_convert (ctype, op0), t1));
5248
5249 /* If these operations "cancel" each other, we have the main
5250 optimizations of this pass, which occur when either constant is a
5251 multiple of the other, in which case we replace this with either an
5252 operation of CODE or TCODE.
5253
5254 If we have an unsigned type that is not a sizetype, we cannot do
5255 this since it will change the result if the original computation
5256 overflowed. */
5257 if ((! TYPE_UNSIGNED (ctype)
5258 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5259 && ! flag_wrapv
5260 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5261 || (tcode == MULT_EXPR
5262 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5263 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5264 {
5265 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5266 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5267 fold_convert (ctype,
5268 const_binop (TRUNC_DIV_EXPR,
5269 op1, c, 0))));
5270 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5271 return fold (build2 (code, ctype, fold_convert (ctype, op0),
5272 fold_convert (ctype,
5273 const_binop (TRUNC_DIV_EXPR,
5274 c, op1, 0))));
5275 }
5276 break;
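/* Examples of the cancellation above: (X * 8) / 4 becomes X * 2,
   and (X * 4) / 8 becomes X / 2, whenever the signedness checks
   permit it. */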
5277
5278 default:
5279 break;
5280 }
5281
5282 return 0;
5283 }
5284 \f
5285 /* Return a node which has the indicated constant VALUE (either 0 or
5286 1), and is of the indicated TYPE. */
5287
5288 static tree
5289 constant_boolean_node (int value, tree type)
5290 {
5291 if (type == integer_type_node)
5292 return value ? integer_one_node : integer_zero_node;
5293 else if (type == boolean_type_node)
5294 return value ? boolean_true_node : boolean_false_node;
5295 else if (TREE_CODE (type) == BOOLEAN_TYPE)
5296 return lang_hooks.truthvalue_conversion (value ? integer_one_node
5297 : integer_zero_node);
5298 else
5299 {
5300 tree t = build_int_2 (value, 0);
5301
5302 TREE_TYPE (t) = type;
5303 return t;
5304 }
5305 }
5306
5307 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5308 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5309 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5310 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5311 COND is the first argument to CODE; otherwise (as in the example
5312 given here), it is the second argument. TYPE is the type of the
5313 original expression. Return NULL_TREE if no simplification is
5314 possible. */
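/* For example, folding `4 + (x < y)' (COND_FIRST_P is zero) first
   produces `(x < y) ? (4 + 1) : (4 + 0)', whose arms fold to give
   `(x < y) ? 5 : 4'. */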
5315
5316 static tree
5317 fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
5318 tree cond, tree arg, int cond_first_p)
5319 {
5320 tree test, true_value, false_value;
5321 tree lhs = NULL_TREE;
5322 tree rhs = NULL_TREE;
5323
5324 /* This transformation is only worthwhile if we don't have to wrap
5325 arg in a SAVE_EXPR, and the operation can be simplified on at least
5326 one of the branches once it's pushed inside the COND_EXPR. */
5327 if (!TREE_CONSTANT (arg))
5328 return NULL_TREE;
5329
5330 if (TREE_CODE (cond) == COND_EXPR)
5331 {
5332 test = TREE_OPERAND (cond, 0);
5333 true_value = TREE_OPERAND (cond, 1);
5334 false_value = TREE_OPERAND (cond, 2);
5335 /* If this operand throws an exception, then it does not make
5336 sense to try to perform a logical or arithmetic operation
5337 involving it. */
5338 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5339 lhs = true_value;
5340 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5341 rhs = false_value;
5342 }
5343 else
5344 {
5345 tree testtype = TREE_TYPE (cond);
5346 test = cond;
5347 true_value = constant_boolean_node (true, testtype);
5348 false_value = constant_boolean_node (false, testtype);
5349 }
5350
5351 if (lhs == 0)
5352 lhs = fold (cond_first_p ? build2 (code, type, true_value, arg)
5353 : build2 (code, type, arg, true_value));
5354 if (rhs == 0)
5355 rhs = fold (cond_first_p ? build2 (code, type, false_value, arg)
5356 : build2 (code, type, arg, false_value));
5357
5358 test = fold (build3 (COND_EXPR, type, test, lhs, rhs));
5359 return fold_convert (type, test);
5360 }
5361
5362 \f
5363 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5364
5365 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5366 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5367 ADDEND is the same as X.
5368
5369 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5370 and finite. The problematic cases are when X is zero, and its mode
5371 has signed zeros. In the case of rounding towards -infinity,
5372 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5373 modes, X + 0 is not the same as X because -0 + 0 is 0. */
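/* For example, when signed zeros are not honored, both X + 0.0 and
   X - 0.0 fold to X. When they are honored, only X - (+0.0) folds
   (X + (-0.0) is treated as X - 0.0), and then only if
   sign-dependent rounding is off. */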
5374
5375 static bool
5376 fold_real_zero_addition_p (tree type, tree addend, int negate)
5377 {
5378 if (!real_zerop (addend))
5379 return false;
5380
5381 /* Don't allow the fold with -fsignaling-nans. */
5382 if (HONOR_SNANS (TYPE_MODE (type)))
5383 return false;
5384
5385 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5386 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5387 return true;
5388
5389 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5390 if (TREE_CODE (addend) == REAL_CST
5391 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5392 negate = !negate;
5393
5394 /* The mode has signed zeros, and we have to honor their sign.
5395 In this situation, there is only one case we can return true for.
5396 X - 0 is the same as X unless rounding towards -infinity is
5397 supported. */
5398 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5399 }
5400
5401 /* Subroutine of fold() that checks comparisons of built-in math
5402 functions against real constants.
5403
5404 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5405 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5406 is the type of the result and ARG0 and ARG1 are the operands of the
5407 comparison. ARG1 must be a TREE_REAL_CST.
5408
5409 The function returns the constant folded tree if a simplification
5410 can be made, and NULL_TREE otherwise. */
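/* For example, sqrt(x) > 2.0 folds below to x > 4.0, and
   sqrt(x) < 2.0 to x < 4.0 with an additional x >= 0 guard when
   NaNs are honored; for a negative constant c, sqrt(x) < c is
   simply false. */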
5411
5412 static tree
5413 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5414 tree type, tree arg0, tree arg1)
5415 {
5416 REAL_VALUE_TYPE c;
5417
5418 if (BUILTIN_SQRT_P (fcode))
5419 {
5420 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5421 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5422
5423 c = TREE_REAL_CST (arg1);
5424 if (REAL_VALUE_NEGATIVE (c))
5425 {
5426 /* sqrt(x) == y, sqrt(x) < y and sqrt(x) <= y are all false, if y is negative. */
5427 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5428 return omit_one_operand (type, integer_zero_node, arg);
5429
5430 /* sqrt(x) > y is always true, if y is negative and we
5431 don't care about NaNs, i.e. negative values of x. */
5432 if (code == NE_EXPR || !HONOR_NANS (mode))
5433 return omit_one_operand (type, integer_one_node, arg);
5434
5435 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5436 return fold (build2 (GE_EXPR, type, arg,
5437 build_real (TREE_TYPE (arg), dconst0)));
5438 }
5439 else if (code == GT_EXPR || code == GE_EXPR)
5440 {
5441 REAL_VALUE_TYPE c2;
5442
5443 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5444 real_convert (&c2, mode, &c2);
5445
5446 if (REAL_VALUE_ISINF (c2))
5447 {
5448 /* sqrt(x) > y is x == +Inf, when y is very large. */
5449 if (HONOR_INFINITIES (mode))
5450 return fold (build2 (EQ_EXPR, type, arg,
5451 build_real (TREE_TYPE (arg), c2)));
5452
5453 /* sqrt(x) > y is always false, when y is very large
5454 and we don't care about infinities. */
5455 return omit_one_operand (type, integer_zero_node, arg);
5456 }
5457
5458 /* sqrt(x) > c is the same as x > c*c. */
5459 return fold (build2 (code, type, arg,
5460 build_real (TREE_TYPE (arg), c2)));
5461 }
5462 else if (code == LT_EXPR || code == LE_EXPR)
5463 {
5464 REAL_VALUE_TYPE c2;
5465
5466 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5467 real_convert (&c2, mode, &c2);
5468
5469 if (REAL_VALUE_ISINF (c2))
5470 {
5471 /* sqrt(x) < y is always true, when y is a very large
5472 value and we don't care about NaNs or Infinities. */
5473 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5474 return omit_one_operand (type, integer_one_node, arg);
5475
5476 /* sqrt(x) < y is x != +Inf when y is very large and we
5477 don't care about NaNs. */
5478 if (! HONOR_NANS (mode))
5479 return fold (build2 (NE_EXPR, type, arg,
5480 build_real (TREE_TYPE (arg), c2)));
5481
5482 /* sqrt(x) < y is x >= 0 when y is very large and we
5483 don't care about Infinities. */
5484 if (! HONOR_INFINITIES (mode))
5485 return fold (build2 (GE_EXPR, type, arg,
5486 build_real (TREE_TYPE (arg), dconst0)));
5487
5488 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5489 if (lang_hooks.decls.global_bindings_p () != 0
5490 || CONTAINS_PLACEHOLDER_P (arg))
5491 return NULL_TREE;
5492
5493 arg = save_expr (arg);
5494 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5495 fold (build2 (GE_EXPR, type, arg,
5496 build_real (TREE_TYPE (arg),
5497 dconst0))),
5498 fold (build2 (NE_EXPR, type, arg,
5499 build_real (TREE_TYPE (arg),
5500 c2)))));
5501 }
5502
5503 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5504 if (! HONOR_NANS (mode))
5505 return fold (build2 (code, type, arg,
5506 build_real (TREE_TYPE (arg), c2)));
5507
5508 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5509 if (lang_hooks.decls.global_bindings_p () == 0
5510 && ! CONTAINS_PLACEHOLDER_P (arg))
5511 {
5512 arg = save_expr (arg);
5513 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5514 fold (build2 (GE_EXPR, type, arg,
5515 build_real (TREE_TYPE (arg),
5516 dconst0))),
5517 fold (build2 (code, type, arg,
5518 build_real (TREE_TYPE (arg),
5519 c2)))));
5520 }
5521 }
5522 }
5523
5524 return NULL_TREE;
5525 }
5526
5527 /* Subroutine of fold() that optimizes comparisons against Infinities,
5528 either +Inf or -Inf.
5529
5530 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5531 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5532 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5533
5534 The function returns the constant folded tree if a simplification
5535 can be made, and NULL_TREE otherwise. */
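/* For example, for doubles, x >= +Inf folds below to x > DBL_MAX
   and x < +Inf to x <= DBL_MAX, while x <= +Inf folds to 1 when
   NaNs do not matter and to x == x otherwise. */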
5536
5537 static tree
5538 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5539 {
5540 enum machine_mode mode;
5541 REAL_VALUE_TYPE max;
5542 tree temp;
5543 bool neg;
5544
5545 mode = TYPE_MODE (TREE_TYPE (arg0));
5546
5547 /* For negative infinity swap the sense of the comparison. */
5548 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5549 if (neg)
5550 code = swap_tree_comparison (code);
5551
5552 switch (code)
5553 {
5554 case GT_EXPR:
5555 /* x > +Inf is always false, if we ignore sNaNs. */
5556 if (HONOR_SNANS (mode))
5557 return NULL_TREE;
5558 return omit_one_operand (type, integer_zero_node, arg0);
5559
5560 case LE_EXPR:
5561 /* x <= +Inf is always true, if we don't care about NaNs. */
5562 if (! HONOR_NANS (mode))
5563 return omit_one_operand (type, integer_one_node, arg0);
5564
5565 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
5566 if (lang_hooks.decls.global_bindings_p () == 0
5567 && ! CONTAINS_PLACEHOLDER_P (arg0))
5568 {
5569 arg0 = save_expr (arg0);
5570 return fold (build2 (EQ_EXPR, type, arg0, arg0));
5571 }
5572 break;
5573
5574 case EQ_EXPR:
5575 case GE_EXPR:
5576 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5577 real_maxval (&max, neg, mode);
5578 return fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5579 arg0, build_real (TREE_TYPE (arg0), max)));
5580
5581 case LT_EXPR:
5582 /* x < +Inf is always equal to x <= DBL_MAX. */
5583 real_maxval (&max, neg, mode);
5584 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5585 arg0, build_real (TREE_TYPE (arg0), max)));
5586
5587 case NE_EXPR:
5588 /* x != +Inf is always equal to !(x > DBL_MAX). */
5589 real_maxval (&max, neg, mode);
5590 if (! HONOR_NANS (mode))
5591 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5592 arg0, build_real (TREE_TYPE (arg0), max)));
5593
5594 /* The transformation below creates non-gimple code and thus is
5595 not appropriate if we are in gimple form. */
5596 if (in_gimple_form)
5597 return NULL_TREE;
5598
5599 temp = fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5600 arg0, build_real (TREE_TYPE (arg0), max)));
5601 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
5602
5603 default:
5604 break;
5605 }
5606
5607 return NULL_TREE;
5608 }
5609
5610 /* Subroutine of fold() that optimizes comparisons of a division by
5611 a nonzero integer constant against an integer constant, i.e.
5612 X/C1 op C2.
5613
5614 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5615 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5616 are the operands of the comparison. ARG1 must be an INTEGER_CST.
5617
5618 The function returns the constant folded tree if a simplification
5619 can be made, and NULL_TREE otherwise. */
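/* For example, for signed X, X / 4 == 2 becomes the range check
   8 <= X && X <= 11, and X / 4 < 2 becomes X < 8. */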
5620
5621 static tree
5622 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5623 {
5624 tree prod, tmp, hi, lo;
5625 tree arg00 = TREE_OPERAND (arg0, 0);
5626 tree arg01 = TREE_OPERAND (arg0, 1);
5627 unsigned HOST_WIDE_INT lpart;
5628 HOST_WIDE_INT hpart;
5629 int overflow;
5630
5631 /* We have to do this the hard way to detect unsigned overflow.
5632 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5633 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5634 TREE_INT_CST_HIGH (arg01),
5635 TREE_INT_CST_LOW (arg1),
5636 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5637 prod = build_int_2 (lpart, hpart);
5638 TREE_TYPE (prod) = TREE_TYPE (arg00);
5639 TREE_OVERFLOW (prod) = force_fit_type (prod, overflow)
5640 || TREE_INT_CST_HIGH (prod) != hpart
5641 || TREE_INT_CST_LOW (prod) != lpart;
5642 TREE_CONSTANT_OVERFLOW (prod) = TREE_OVERFLOW (prod);
5643
5644 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
5645 {
5646 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5647 lo = prod;
5648
5649 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5650 overflow = add_double (TREE_INT_CST_LOW (prod),
5651 TREE_INT_CST_HIGH (prod),
5652 TREE_INT_CST_LOW (tmp),
5653 TREE_INT_CST_HIGH (tmp),
5654 &lpart, &hpart);
5655 hi = build_int_2 (lpart, hpart);
5656 TREE_TYPE (hi) = TREE_TYPE (arg00);
5657 TREE_OVERFLOW (hi) = force_fit_type (hi, overflow)
5658 || TREE_INT_CST_HIGH (hi) != hpart
5659 || TREE_INT_CST_LOW (hi) != lpart
5660 || TREE_OVERFLOW (prod);
5661 TREE_CONSTANT_OVERFLOW (hi) = TREE_OVERFLOW (hi);
5662 }
5663 else if (tree_int_cst_sgn (arg01) >= 0)
5664 {
5665 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5666 switch (tree_int_cst_sgn (arg1))
5667 {
5668 case -1:
5669 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5670 hi = prod;
5671 break;
5672
5673 case 0:
5674 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
5675 hi = tmp;
5676 break;
5677
5678 case 1:
5679 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5680 lo = prod;
5681 break;
5682
5683 default:
5684 abort ();
5685 }
5686 }
5687 else
5688 {
5689 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
5690 switch (tree_int_cst_sgn (arg1))
5691 {
5692 case -1:
5693 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5694 lo = prod;
5695 break;
5696
5697 case 0:
5698 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
5699 lo = tmp;
5700 break;
5701
5702 case 1:
5703 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5704 hi = prod;
5705 break;
5706
5707 default:
5708 abort ();
5709 }
5710 }
5711
5712 switch (code)
5713 {
5714 case EQ_EXPR:
5715 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5716 return omit_one_operand (type, integer_zero_node, arg00);
5717 if (TREE_OVERFLOW (hi))
5718 return fold (build2 (GE_EXPR, type, arg00, lo));
5719 if (TREE_OVERFLOW (lo))
5720 return fold (build2 (LE_EXPR, type, arg00, hi));
5721 return build_range_check (type, arg00, 1, lo, hi);
5722
5723 case NE_EXPR:
5724 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5725 return omit_one_operand (type, integer_one_node, arg00);
5726 if (TREE_OVERFLOW (hi))
5727 return fold (build2 (LT_EXPR, type, arg00, lo));
5728 if (TREE_OVERFLOW (lo))
5729 return fold (build2 (GT_EXPR, type, arg00, hi));
5730 return build_range_check (type, arg00, 0, lo, hi);
5731
5732 case LT_EXPR:
5733 if (TREE_OVERFLOW (lo))
5734 return omit_one_operand (type, integer_zero_node, arg00);
5735 return fold (build2 (LT_EXPR, type, arg00, lo));
5736
5737 case LE_EXPR:
5738 if (TREE_OVERFLOW (hi))
5739 return omit_one_operand (type, integer_one_node, arg00);
5740 return fold (build2 (LE_EXPR, type, arg00, hi));
5741
5742 case GT_EXPR:
5743 if (TREE_OVERFLOW (hi))
5744 return omit_one_operand (type, integer_zero_node, arg00);
5745 return fold (build2 (GT_EXPR, type, arg00, hi));
5746
5747 case GE_EXPR:
5748 if (TREE_OVERFLOW (lo))
5749 return omit_one_operand (type, integer_one_node, arg00);
5750 return fold (build2 (GE_EXPR, type, arg00, lo));
5751
5752 default:
5753 break;
5754 }
5755
5756 return NULL_TREE;
5757 }
5758
5759
5760 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5761 equality/inequality test, then return a simplified form of
5762 the test using shifts and logical operations. Otherwise return
5763 NULL_TREE. RESULT_TYPE is the desired result type. */
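/* For example, (X & 8) != 0 is rewritten as ((X >> 3) & 1) and
   (X & 8) == 0 as (((X >> 3) ^ 1) & 1), while a mask consisting of
   the sign bit of X turns the test into X < 0 or X >= 0. */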
5764
5765 tree
5766 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
5767 tree result_type)
5768 {
5769 /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
5770 operand 0. */
5771 if (code == TRUTH_NOT_EXPR)
5772 {
5773 code = TREE_CODE (arg0);
5774 if (code != NE_EXPR && code != EQ_EXPR)
5775 return NULL_TREE;
5776
5777 /* Extract the arguments of the EQ/NE. */
5778 arg1 = TREE_OPERAND (arg0, 1);
5779 arg0 = TREE_OPERAND (arg0, 0);
5780
5781 /* This requires us to invert the code. */
5782 code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
5783 }
5784
5785 /* If this is testing a single bit, we can optimize the test. */
5786 if ((code == NE_EXPR || code == EQ_EXPR)
5787 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5788 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5789 {
5790 tree inner = TREE_OPERAND (arg0, 0);
5791 tree type = TREE_TYPE (arg0);
5792 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5793 enum machine_mode operand_mode = TYPE_MODE (type);
5794 int ops_unsigned;
5795 tree signed_type, unsigned_type, intermediate_type;
5796 tree arg00;
5797
5798 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5799 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
5800 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5801 if (arg00 != NULL_TREE
5802 /* This is only a win if casting to a signed type is cheap,
5803 i.e. when arg00's type is not a partial mode. */
5804 && TYPE_PRECISION (TREE_TYPE (arg00))
5805 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
5806 {
5807 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
5808 return fold (build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
5809 result_type, fold_convert (stype, arg00),
5810 fold_convert (stype, integer_zero_node)));
5811 }
5812
5813 /* Otherwise we have (A & C) != 0 where C is a single bit,
5814 convert that into ((A >> C2) & 1), where C2 = log2(C).
5815 Similarly for (A & C) == 0. */
5816
5817 /* If INNER is a right shift of a constant and it plus BITNUM does
5818 not overflow, adjust BITNUM and INNER. */
5819 if (TREE_CODE (inner) == RSHIFT_EXPR
5820 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5821 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5822 && bitnum < TYPE_PRECISION (type)
5823 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5824 TYPE_PRECISION (type) - bitnum))
5825 {
5826 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5827 inner = TREE_OPERAND (inner, 0);
5828 }
5829
5830 /* If we are going to be able to omit the AND below, we must do our
5831 operations as unsigned. If we must use the AND, we have a choice.
5832 Normally unsigned is faster, but for some machines signed is. */
5833 #ifdef LOAD_EXTEND_OP
5834 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
5835 #else
5836 ops_unsigned = 1;
5837 #endif
5838
5839 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
5840 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
5841 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5842 inner = fold_convert (intermediate_type, inner);
5843
5844 if (bitnum != 0)
5845 inner = build2 (RSHIFT_EXPR, intermediate_type,
5846 inner, size_int (bitnum));
5847
5848 if (code == EQ_EXPR)
5849 inner = build2 (BIT_XOR_EXPR, intermediate_type,
5850 inner, integer_one_node);
5851
5852 /* Put the AND last so it can combine with more things. */
5853 inner = build2 (BIT_AND_EXPR, intermediate_type,
5854 inner, integer_one_node);
5855
5856 /* Make sure to return the proper type. */
5857 inner = fold_convert (result_type, inner);
5858
5859 return inner;
5860 }
5861 return NULL_TREE;
5862 }
5863
5864 /* Check whether we are allowed to reorder operands arg0 and arg1,
5865 such that the evaluation of arg1 occurs before arg0. */
5866
5867 static bool
5868 reorder_operands_p (tree arg0, tree arg1)
5869 {
5870 if (! flag_evaluation_order)
5871 return true;
5872 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
5873 return true;
5874 return ! TREE_SIDE_EFFECTS (arg0)
5875 && ! TREE_SIDE_EFFECTS (arg1);
5876 }
5877
5878 /* Test whether it is preferable to swap two operands, ARG0 and
5879 ARG1, for example because ARG0 is an integer constant and ARG1
5880 isn't. If REORDER is true, only recommend swapping if we can
5881 evaluate the operands in reverse order. */
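/* For example, an INTEGER_CST always sorts last, which is how fold
   canonicalizes 5 + X as X + 5 for commutative codes. */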
5882
5883 bool
5884 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
5885 {
5886 STRIP_SIGN_NOPS (arg0);
5887 STRIP_SIGN_NOPS (arg1);
5888
5889 if (TREE_CODE (arg1) == INTEGER_CST)
5890 return 0;
5891 if (TREE_CODE (arg0) == INTEGER_CST)
5892 return 1;
5893
5894 if (TREE_CODE (arg1) == REAL_CST)
5895 return 0;
5896 if (TREE_CODE (arg0) == REAL_CST)
5897 return 1;
5898
5899 if (TREE_CODE (arg1) == COMPLEX_CST)
5900 return 0;
5901 if (TREE_CODE (arg0) == COMPLEX_CST)
5902 return 1;
5903
5904 if (TREE_CONSTANT (arg1))
5905 return 0;
5906 if (TREE_CONSTANT (arg0))
5907 return 1;
5908
5909 if (optimize_size)
5910 return 0;
5911
5912 if (reorder && flag_evaluation_order
5913 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5914 return 0;
5915
5916 if (DECL_P (arg1))
5917 return 0;
5918 if (DECL_P (arg0))
5919 return 1;
5929
5930 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
5931 for commutative and comparison operators. Ensuring a canonical
5932 form allows the optimizers to find additional redundancies without
5933 having to explicitly check for both orderings. */
5934 if (TREE_CODE (arg0) == SSA_NAME
5935 && TREE_CODE (arg1) == SSA_NAME
5936 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
5937 return 1;
5938
5939 return 0;
5940 }
5941
5942 /* Perform constant folding and related simplification of EXPR.
5943 The related simplifications include x*1 => x, x*0 => 0, etc.,
5944 and application of the associative law.
5945 NOP_EXPR conversions may be removed freely (as long as we
5946 are careful not to change the type of the overall expression).
5947 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
5948 but we can constant-fold them if they have constant operands. */
5949
5950 #ifdef ENABLE_FOLD_CHECKING
5951 # define fold(x) fold_1 (x)
5952 static tree fold_1 (tree);
5953 static
5954 #endif
5955 tree
5956 fold (tree expr)
5957 {
5958 const tree t = expr;
5959 const tree type = TREE_TYPE (expr);
5960 tree t1 = NULL_TREE;
5961 tree tem;
5962 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
5963 enum tree_code code = TREE_CODE (t);
5964 int kind = TREE_CODE_CLASS (code);
5965
5966 /* WINS will be nonzero when the switch is done
5967 if all operands are constant. */
5968 int wins = 1;
5969
5970 /* Don't try to process an RTL_EXPR since its operands aren't trees.
5971 Likewise for a SAVE_EXPR that's already been evaluated. */
5972 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
5973 return t;
5974
5975 /* Return right away if a constant. */
5976 if (kind == 'c')
5977 return t;
5978
5979 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
5980 {
5981 tree subop;
5982
5983 /* Special case for conversion ops that can have fixed point args. */
5984 arg0 = TREE_OPERAND (t, 0);
5985
5986 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
5987 if (arg0 != 0)
5988 STRIP_SIGN_NOPS (arg0);
5989
5990 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
5991 subop = TREE_REALPART (arg0);
5992 else
5993 subop = arg0;
5994
5995 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
5996 && TREE_CODE (subop) != REAL_CST)
5997 /* Note that TREE_CONSTANT isn't enough:
5998 static var addresses are constant but we can't
5999 do arithmetic on them. */
6000 wins = 0;
6001 }
6002 else if (IS_EXPR_CODE_CLASS (kind))
6003 {
6004 int len = first_rtl_op (code);
6005 int i;
6006 for (i = 0; i < len; i++)
6007 {
6008 tree op = TREE_OPERAND (t, i);
6009 tree subop;
6010
6011 if (op == 0)
6012 continue; /* Valid for CALL_EXPR, at least. */
6013
6014 /* Strip any conversions that don't change the mode. This is
6015 safe for every expression, except for a comparison expression
6016 because its signedness is derived from its operands. So, in
6017 the latter case, only strip conversions that don't change the
6018 signedness.
6019
6020 Note that this is done as an internal manipulation within the
6021 constant folder, in order to find the simplest representation
6022 of the arguments so that their form can be studied. In any
6023 case, the appropriate type conversions should be put back in
6024 the tree that will get out of the constant folder. */
6025 if (kind == '<')
6026 STRIP_SIGN_NOPS (op);
6027 else
6028 STRIP_NOPS (op);
6029
6030 if (TREE_CODE (op) == COMPLEX_CST)
6031 subop = TREE_REALPART (op);
6032 else
6033 subop = op;
6034
6035 if (TREE_CODE (subop) != INTEGER_CST
6036 && TREE_CODE (subop) != REAL_CST)
6037 /* Note that TREE_CONSTANT isn't enough:
6038 static var addresses are constant but we can't
6039 do arithmetic on them. */
6040 wins = 0;
6041
6042 if (i == 0)
6043 arg0 = op;
6044 else if (i == 1)
6045 arg1 = op;
6046 }
6047 }
6048
6049 /* If this is a commutative operation, and ARG0 is a constant, move it
6050 to ARG1 to reduce the number of tests below. */
6051 if (commutative_tree_code (code)
6052 && tree_swap_operands_p (arg0, arg1, true))
6053 return fold (build2 (code, type, TREE_OPERAND (t, 1),
6054 TREE_OPERAND (t, 0)));
6055
6056 /* Now WINS is set as described above,
6057 ARG0 is the first operand of EXPR,
6058 and ARG1 is the second operand (if it has more than one operand).
6059
6060 First check for cases where an arithmetic operation is applied to a
6061 compound, conditional, or comparison operation. Push the arithmetic
6062 operation inside the compound or conditional to see if any folding
6063 can then be done. Convert comparison to conditional for this purpose.
6064 This also optimizes non-constant cases that used to be done in
6065 expand_expr.
6066
6067 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
6068 where one of the operands is a truth value and the other is either a
6069 truth value or a BIT_AND_EXPR with the constant 1. In that case, the
6070 code below would make the expression more complex. Change it to a
6071 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
6072 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
6073
6074 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
6075 || code == EQ_EXPR || code == NE_EXPR)
6076 && ((truth_value_p (TREE_CODE (arg0))
6077 && (truth_value_p (TREE_CODE (arg1))
6078 || (TREE_CODE (arg1) == BIT_AND_EXPR
6079 && integer_onep (TREE_OPERAND (arg1, 1)))))
6080 || (truth_value_p (TREE_CODE (arg1))
6081 && (truth_value_p (TREE_CODE (arg0))
6082 || (TREE_CODE (arg0) == BIT_AND_EXPR
6083 && integer_onep (TREE_OPERAND (arg0, 1)))))))
6084 {
6085 tem = fold (build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
6086 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
6087 : TRUTH_XOR_EXPR,
6088 type, fold_convert (boolean_type_node, arg0),
6089 fold_convert (boolean_type_node, arg1)));
6090
6091 if (code == EQ_EXPR)
6092 tem = invert_truthvalue (tem);
6093
6094 return tem;
6095 }
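/* For example, the conversion just above turns (x < y) & (y < z)
   into a TRUTH_AND_EXPR of the two comparisons, and
   (x < y) == (y < z) into the inversion of their TRUTH_XOR_EXPR. */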
6096
6097 if (TREE_CODE_CLASS (code) == '1')
6098 {
6099 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6100 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6101 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
6102 else if (TREE_CODE (arg0) == COND_EXPR)
6103 {
6104 tree arg01 = TREE_OPERAND (arg0, 1);
6105 tree arg02 = TREE_OPERAND (arg0, 2);
6106 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6107 arg01 = fold (build1 (code, type, arg01));
6108 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6109 arg02 = fold (build1 (code, type, arg02));
6110 tem = fold (build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6111 arg01, arg02));
6112
6113 /* If this was a conversion, and all we did was to move it
6114 inside the COND_EXPR, bring it back out. But leave it if
6115 it is a conversion from integer to integer and the
6116 result precision is no wider than a word since such a
6117 conversion is cheap and may be optimized away by combine,
6118 while it couldn't if it were outside the COND_EXPR. Then return
6119 so we don't get into an infinite recursion loop taking the
6120 conversion out and then back in. */
6121
6122 if ((code == NOP_EXPR || code == CONVERT_EXPR
6123 || code == NON_LVALUE_EXPR)
6124 && TREE_CODE (tem) == COND_EXPR
6125 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6126 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6127 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 1)))
6128 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 2)))
6129 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6130 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6131 && ! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6132 && (INTEGRAL_TYPE_P
6133 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6134 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD))
6135 tem = build1 (code, type,
6136 build3 (COND_EXPR,
6137 TREE_TYPE (TREE_OPERAND
6138 (TREE_OPERAND (tem, 1), 0)),
6139 TREE_OPERAND (tem, 0),
6140 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6141 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6142 return tem;
6143 }
6144 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
6145 {
6146 if (TREE_CODE (type) == BOOLEAN_TYPE)
6147 {
6148 arg0 = copy_node (arg0);
6149 TREE_TYPE (arg0) = type;
6150 return arg0;
6151 }
6152 else if (TREE_CODE (type) != INTEGER_TYPE)
6153 return fold (build3 (COND_EXPR, type, arg0,
6154 fold (build1 (code, type,
6155 integer_one_node)),
6156 fold (build1 (code, type,
6157 integer_zero_node))));
6158 }
6159 }
6160 else if (TREE_CODE_CLASS (code) == '<'
6161 && TREE_CODE (arg0) == COMPOUND_EXPR)
6162 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6163 fold (build2 (code, type, TREE_OPERAND (arg0, 1), arg1)));
6164 else if (TREE_CODE_CLASS (code) == '<'
6165 && TREE_CODE (arg1) == COMPOUND_EXPR)
6166 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
6167 fold (build2 (code, type, arg0, TREE_OPERAND (arg1, 1))));
6168 else if (TREE_CODE_CLASS (code) == '2'
6169 || TREE_CODE_CLASS (code) == '<')
6170 {
6171 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6172 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6173 fold (build2 (code, type, TREE_OPERAND (arg0, 1),
6174 arg1)));
6175 if (TREE_CODE (arg1) == COMPOUND_EXPR
6176 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
6177 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
6178 fold (build2 (code, type,
6179 arg0, TREE_OPERAND (arg1, 1))));
6180
6181 if (TREE_CODE (arg0) == COND_EXPR
6182 || TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
6183 {
6184 tem = fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
6185 /*cond_first_p=*/1);
6186 if (tem != NULL_TREE)
6187 return tem;
6188 }
6189
6190 if (TREE_CODE (arg1) == COND_EXPR
6191 || TREE_CODE_CLASS (TREE_CODE (arg1)) == '<')
6192 {
6193 tem = fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
6194 /*cond_first_p=*/0);
6195 if (tem != NULL_TREE)
6196 return tem;
6197 }
6198 }
6199
6200 switch (code)
6201 {
6202 case CONST_DECL:
6203 return fold (DECL_INITIAL (t));
6204
6205 case NOP_EXPR:
6206 case FLOAT_EXPR:
6207 case CONVERT_EXPR:
6208 case FIX_TRUNC_EXPR:
6209 case FIX_CEIL_EXPR:
6210 case FIX_FLOOR_EXPR:
6211 case FIX_ROUND_EXPR:
6212 if (TREE_TYPE (TREE_OPERAND (t, 0)) == type)
6213 return TREE_OPERAND (t, 0);
6214
6215 /* Handle cases of two conversions in a row. */
6216 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
6217 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
6218 {
6219 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6220 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
6221 int inside_int = INTEGRAL_TYPE_P (inside_type);
6222 int inside_ptr = POINTER_TYPE_P (inside_type);
6223 int inside_float = FLOAT_TYPE_P (inside_type);
6224 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6225 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6226 int inter_int = INTEGRAL_TYPE_P (inter_type);
6227 int inter_ptr = POINTER_TYPE_P (inter_type);
6228 int inter_float = FLOAT_TYPE_P (inter_type);
6229 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6230 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6231 int final_int = INTEGRAL_TYPE_P (type);
6232 int final_ptr = POINTER_TYPE_P (type);
6233 int final_float = FLOAT_TYPE_P (type);
6234 unsigned int final_prec = TYPE_PRECISION (type);
6235 int final_unsignedp = TYPE_UNSIGNED (type);
6236
6237 /* In addition to the cases of two conversions in a row
6238 handled below, if we are converting something to its own
6239 type via an object of identical or wider precision, neither
6240 conversion is needed. */
6241 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6242 && ((inter_int && final_int) || (inter_float && final_float))
6243 && inter_prec >= final_prec)
6244 return fold (build1 (code, type,
6245 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6246
6247 /* Likewise, if the intermediate and final types are either both
6248 float or both integer, we don't need the middle conversion if
6249 it is wider than the final type and doesn't change the signedness
6250 (for integers). Avoid this if the final type is a pointer
6251 since then we sometimes need the inner conversion. Likewise if
6252 the outer has a precision not equal to the size of its mode. */
6253 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6254 || (inter_float && inside_float))
6255 && inter_prec >= inside_prec
6256 && (inter_float || inter_unsignedp == inside_unsignedp)
6257 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6258 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6259 && ! final_ptr)
6260 return fold (build1 (code, type,
6261 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6262
6263 /* If we have a sign-extension of a zero-extended value, we can
6264 replace that by a single zero-extension. */
6265 if (inside_int && inter_int && final_int
6266 && inside_prec < inter_prec && inter_prec < final_prec
6267 && inside_unsignedp && !inter_unsignedp)
6268 return fold (build1 (code, type,
6269 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6270
6271 /* Two conversions in a row are not needed unless:
6272 - some conversion is floating-point (overstrict for now), or
6273 - the intermediate type is narrower than both initial and
6274 final, or
6275 - the intermediate type and innermost type differ in signedness,
6276 and the outermost type is wider than the intermediate, or
6277 - the initial type is a pointer type and the precisions of the
6278 intermediate and final types differ, or
6279 - the final type is a pointer type and the precisions of the
6280 initial and intermediate types differ. */
6281 if (! inside_float && ! inter_float && ! final_float
6282 && (inter_prec > inside_prec || inter_prec > final_prec)
6283 && ! (inside_int && inter_int
6284 && inter_unsignedp != inside_unsignedp
6285 && inter_prec < final_prec)
6286 && ((inter_unsignedp && inter_prec > inside_prec)
6287 == (final_unsignedp && final_prec > inter_prec))
6288 && ! (inside_ptr && inter_prec != final_prec)
6289 && ! (final_ptr && inside_prec != inter_prec)
6290 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6291 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6292 && ! final_ptr)
6293 return fold (build1 (code, type,
6294 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6295 }
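/* For example, widening an `unsigned char' value first to `unsigned
   int' and then to `unsigned long' needs only the final conversion on
   a typical target: the intermediate type is wider than the source
   and preserves its signedness. */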
6296
6297 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
6298 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
6299 /* Detect assigning a bitfield. */
6300 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
6301 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
6302 {
6303 /* Don't leave an assignment inside a conversion
6304 unless assigning a bitfield. */
6305 tree prev = TREE_OPERAND (t, 0);
6306 tem = copy_node (t);
6307 TREE_OPERAND (tem, 0) = TREE_OPERAND (prev, 1);
6308 /* First do the assignment, then return converted constant. */
6309 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), prev, fold (tem));
6310 TREE_NO_WARNING (tem) = 1;
6311 TREE_USED (tem) = 1;
6312 return tem;
6313 }
6314
6315 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6316 constant (if x has signed type, the sign bit cannot be set
6317 in c). This folds extension into the BIT_AND_EXPR. */
6318 if (INTEGRAL_TYPE_P (type)
6319 && TREE_CODE (type) != BOOLEAN_TYPE
6320 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
6321 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
6322 {
6323 tree and = TREE_OPERAND (t, 0);
6324 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6325 int change = 0;
6326
6327 if (TYPE_UNSIGNED (TREE_TYPE (and))
6328 || (TYPE_PRECISION (type)
6329 <= TYPE_PRECISION (TREE_TYPE (and))))
6330 change = 1;
6331 else if (TYPE_PRECISION (TREE_TYPE (and1))
6332 <= HOST_BITS_PER_WIDE_INT
6333 && host_integerp (and1, 1))
6334 {
6335 unsigned HOST_WIDE_INT cst;
6336
6337 cst = tree_low_cst (and1, 1);
6338 cst &= (HOST_WIDE_INT) -1
6339 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6340 change = (cst == 0);
6341 #ifdef LOAD_EXTEND_OP
6342 if (change
6343 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6344 == ZERO_EXTEND))
6345 {
6346 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
6347 and0 = fold_convert (uns, and0);
6348 and1 = fold_convert (uns, and1);
6349 }
6350 #endif
6351 }
6352 if (change)
6353 return fold (build2 (BIT_AND_EXPR, type,
6354 fold_convert (type, and0),
6355 fold_convert (type, and1)));
6356 }
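/* For example, (long)(i & 0x7f), with a 32-bit int i and a 64-bit
   long, folds here to (long)i & 0x7f, since the sign bit of the
   mask is clear. */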
6357
6358 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
6359 T2 being pointers to types of the same size. */
6360 if (POINTER_TYPE_P (TREE_TYPE (t))
6361 && TREE_CODE_CLASS (TREE_CODE (arg0)) == '2'
6362 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
6363 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6364 {
6365 tree arg00 = TREE_OPERAND (arg0, 0);
6366 tree t0 = TREE_TYPE (t);
6367 tree t1 = TREE_TYPE (arg00);
6368 tree tt0 = TREE_TYPE (t0);
6369 tree tt1 = TREE_TYPE (t1);
6370 tree s0 = TYPE_SIZE (tt0);
6371 tree s1 = TYPE_SIZE (tt1);
6372
6373 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
6374 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
6375 TREE_OPERAND (arg0, 1));
6376 }
6377
6378 tem = fold_convert_const (code, type, arg0);
6379 return tem ? tem : t;
6380
6381 case VIEW_CONVERT_EXPR:
6382 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
6383 return build1 (VIEW_CONVERT_EXPR, type,
6384 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6385 return t;
6386
6387 case COMPONENT_REF:
6388 if (TREE_CODE (arg0) == CONSTRUCTOR
6389 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
6390 {
6391 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
6392 if (m)
6393 return TREE_VALUE (m);
6394 }
6395 return t;
6396
6397 case RANGE_EXPR:
6398 if (TREE_CONSTANT (t) != wins)
6399 {
6400 tem = copy_node (t);
6401 TREE_CONSTANT (tem) = wins;
6402 TREE_INVARIANT (tem) = wins;
6403 return tem;
6404 }
6405 return t;
6406
6407 case NEGATE_EXPR:
6408 if (negate_expr_p (arg0))
6409 return fold_convert (type, negate_expr (arg0));
6410 return t;
6411
6412 case ABS_EXPR:
6413 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
6414 return fold_abs_const (arg0, type);
6415 else if (TREE_CODE (arg0) == NEGATE_EXPR)
6416 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
6417 /* Convert fabs((double)float) into (double)fabsf(float). */
6418 else if (TREE_CODE (arg0) == NOP_EXPR
6419 && TREE_CODE (type) == REAL_TYPE)
6420 {
6421 tree targ0 = strip_float_extensions (arg0);
6422 if (targ0 != arg0)
6423 return fold_convert (type, fold (build1 (ABS_EXPR,
6424 TREE_TYPE (targ0),
6425 targ0)));
6426 }
6427 else if (tree_expr_nonnegative_p (arg0))
6428 return arg0;
6429 return t;
6430
6431 case CONJ_EXPR:
6432 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6433 return fold_convert (type, arg0);
6434 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6435 return build2 (COMPLEX_EXPR, type,
6436 TREE_OPERAND (arg0, 0),
6437 negate_expr (TREE_OPERAND (arg0, 1)));
6438 else if (TREE_CODE (arg0) == COMPLEX_CST)
6439 return build_complex (type, TREE_REALPART (arg0),
6440 negate_expr (TREE_IMAGPART (arg0)));
6441 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6442 return fold (build2 (TREE_CODE (arg0), type,
6443 fold (build1 (CONJ_EXPR, type,
6444 TREE_OPERAND (arg0, 0))),
6445 fold (build1 (CONJ_EXPR, type,
6446 TREE_OPERAND (arg0, 1)))));
6447 else if (TREE_CODE (arg0) == CONJ_EXPR)
6448 return TREE_OPERAND (arg0, 0);
6449 return t;
6450
6451 case BIT_NOT_EXPR:
6452 if (TREE_CODE (arg0) == INTEGER_CST)
6453 return fold_not_const (arg0, type);
6454 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
6455 return TREE_OPERAND (arg0, 0);
6456 return t;
6457
6458 case PLUS_EXPR:
6459 /* A + (-B) -> A - B */
6460 if (TREE_CODE (arg1) == NEGATE_EXPR)
6461 return fold (build2 (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6462 /* (-A) + B -> B - A */
6463 if (TREE_CODE (arg0) == NEGATE_EXPR
6464 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
6465 return fold (build2 (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
6466 if (! FLOAT_TYPE_P (type))
6467 {
6468 if (integer_zerop (arg1))
6469 return non_lvalue (fold_convert (type, arg0));
6470
6471 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
6472 with a constant, and the two constants have no bits in common,
6473 we should treat this as a BIT_IOR_EXPR since this may produce more
6474 simplifications. */
6475 if (TREE_CODE (arg0) == BIT_AND_EXPR
6476 && TREE_CODE (arg1) == BIT_AND_EXPR
6477 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6478 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6479 && integer_zerop (const_binop (BIT_AND_EXPR,
6480 TREE_OPERAND (arg0, 1),
6481 TREE_OPERAND (arg1, 1), 0)))
6482 {
6483 code = BIT_IOR_EXPR;
6484 goto bit_ior;
6485 }
6486
6487 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
6488 (plus (plus (mult) (mult)) (foo)) so that we can
6489 take advantage of the factoring cases below. */
6490 if ((TREE_CODE (arg0) == PLUS_EXPR
6491 && TREE_CODE (arg1) == MULT_EXPR)
6492 || (TREE_CODE (arg1) == PLUS_EXPR
6493 && TREE_CODE (arg0) == MULT_EXPR))
6494 {
6495 tree parg0, parg1, parg, marg;
6496
6497 if (TREE_CODE (arg0) == PLUS_EXPR)
6498 parg = arg0, marg = arg1;
6499 else
6500 parg = arg1, marg = arg0;
6501 parg0 = TREE_OPERAND (parg, 0);
6502 parg1 = TREE_OPERAND (parg, 1);
6503 STRIP_NOPS (parg0);
6504 STRIP_NOPS (parg1);
6505
6506 if (TREE_CODE (parg0) == MULT_EXPR
6507 && TREE_CODE (parg1) != MULT_EXPR)
6508 return fold (build2 (PLUS_EXPR, type,
6509 fold (build2 (PLUS_EXPR, type,
6510 fold_convert (type, parg0),
6511 fold_convert (type, marg))),
6512 fold_convert (type, parg1)));
6513 if (TREE_CODE (parg0) != MULT_EXPR
6514 && TREE_CODE (parg1) == MULT_EXPR)
6515 return fold (build2 (PLUS_EXPR, type,
6516 fold (build2 (PLUS_EXPR, type,
6517 fold_convert (type, parg1),
6518 fold_convert (type, marg))),
6519 fold_convert (type, parg0)));
6520 }
6521
6522 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
6523 {
6524 tree arg00, arg01, arg10, arg11;
6525 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6526
6527 /* (A * C) + (B * C) -> (A+B) * C.
6528 We are most concerned about the case where C is a constant,
6529 but other combinations show up during loop reduction. Since
6530 it is not difficult, try all four possibilities. */
6531
6532 arg00 = TREE_OPERAND (arg0, 0);
6533 arg01 = TREE_OPERAND (arg0, 1);
6534 arg10 = TREE_OPERAND (arg1, 0);
6535 arg11 = TREE_OPERAND (arg1, 1);
6536 same = NULL_TREE;
6537
6538 if (operand_equal_p (arg01, arg11, 0))
6539 same = arg01, alt0 = arg00, alt1 = arg10;
6540 else if (operand_equal_p (arg00, arg10, 0))
6541 same = arg00, alt0 = arg01, alt1 = arg11;
6542 else if (operand_equal_p (arg00, arg11, 0))
6543 same = arg00, alt0 = arg01, alt1 = arg10;
6544 else if (operand_equal_p (arg01, arg10, 0))
6545 same = arg01, alt0 = arg00, alt1 = arg11;
6546
6547 /* No identical multiplicands; see if we can find a common
6548 power-of-two factor in non-power-of-two multiplies. This
6549 can help in multi-dimensional array access. */
6550 else if (TREE_CODE (arg01) == INTEGER_CST
6551 && TREE_CODE (arg11) == INTEGER_CST
6552 && TREE_INT_CST_HIGH (arg01) == 0
6553 && TREE_INT_CST_HIGH (arg11) == 0)
6554 {
6555 HOST_WIDE_INT int01, int11, tmp;
6556 int01 = TREE_INT_CST_LOW (arg01);
6557 int11 = TREE_INT_CST_LOW (arg11);
6558
6559 /* Move min of absolute values to int11. */
6560 if ((int01 >= 0 ? int01 : -int01)
6561 < (int11 >= 0 ? int11 : -int11))
6562 {
6563 tmp = int01, int01 = int11, int11 = tmp;
6564 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6565 alt0 = arg01, arg01 = arg11, arg11 = alt0;
6566 }
6567
6568 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6569 {
6570 alt0 = fold (build2 (MULT_EXPR, type, arg00,
6571 build_int_2 (int01 / int11, 0)));
6572 alt1 = arg10;
6573 same = arg11;
6574 }
6575 }
6576
6577 if (same)
6578 return fold (build2 (MULT_EXPR, type,
6579 fold (build2 (PLUS_EXPR, type,
6580 alt0, alt1)),
6581 same));
6582 }
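/* For example, X*4 + Y*4 becomes (X + Y)*4 above, and the
   power-of-two path turns X*12 + Y*4 into (X*3 + Y)*4. */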
6583 }
6584 else
6585 {
6586 /* See if ARG1 is zero and X + ARG1 reduces to X. */
6587 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
6588 return non_lvalue (fold_convert (type, arg0));
6589
6590 /* Likewise if the operands are reversed. */
6591 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6592 return non_lvalue (fold_convert (type, arg1));
6593
6594 /* Convert x+x into x*2.0. */
6595 if (operand_equal_p (arg0, arg1, 0)
6596 && SCALAR_FLOAT_TYPE_P (type))
6597 return fold (build2 (MULT_EXPR, type, arg0,
6598 build_real (type, dconst2)));
6599
6600 /* Convert x*c+x into x*(c+1). */
6601 if (flag_unsafe_math_optimizations
6602 && TREE_CODE (arg0) == MULT_EXPR
6603 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6604 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6605 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
6606 {
6607 REAL_VALUE_TYPE c;
6608
6609 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6610 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6611 return fold (build2 (MULT_EXPR, type, arg1,
6612 build_real (type, c)));
6613 }
6614
6615 /* Convert x+x*c into x*(c+1). */
6616 if (flag_unsafe_math_optimizations
6617 && TREE_CODE (arg1) == MULT_EXPR
6618 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6619 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6620 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
6621 {
6622 REAL_VALUE_TYPE c;
6623
6624 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6625 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6626 return fold (build2 (MULT_EXPR, type, arg0,
6627 build_real (type, c)));
6628 }
6629
6630 /* Convert x*c1+x*c2 into x*(c1+c2). */
6631 if (flag_unsafe_math_optimizations
6632 && TREE_CODE (arg0) == MULT_EXPR
6633 && TREE_CODE (arg1) == MULT_EXPR
6634 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6635 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6636 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6637 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6638 && operand_equal_p (TREE_OPERAND (arg0, 0),
6639 TREE_OPERAND (arg1, 0), 0))
6640 {
6641 REAL_VALUE_TYPE c1, c2;
6642
6643 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6644 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6645 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
6646 return fold (build2 (MULT_EXPR, type,
6647 TREE_OPERAND (arg0, 0),
6648 build_real (type, c1)));
6649 }
6650 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
6651 if (flag_unsafe_math_optimizations
6652 && TREE_CODE (arg1) == PLUS_EXPR
6653 && TREE_CODE (arg0) != MULT_EXPR)
6654 {
6655 tree tree10 = TREE_OPERAND (arg1, 0);
6656 tree tree11 = TREE_OPERAND (arg1, 1);
6657 if (TREE_CODE (tree11) == MULT_EXPR
6658 && TREE_CODE (tree10) == MULT_EXPR)
6659 {
6660 tree tree0;
6661 tree0 = fold (build2 (PLUS_EXPR, type, arg0, tree10));
6662 return fold (build2 (PLUS_EXPR, type, tree0, tree11));
6663 }
6664 }
6665 /* Convert (b*c + d*e) + a into b*c + (d*e + a). */
6666 if (flag_unsafe_math_optimizations
6667 && TREE_CODE (arg0) == PLUS_EXPR
6668 && TREE_CODE (arg1) != MULT_EXPR)
6669 {
6670 tree tree00 = TREE_OPERAND (arg0, 0);
6671 tree tree01 = TREE_OPERAND (arg0, 1);
6672 if (TREE_CODE (tree01) == MULT_EXPR
6673 && TREE_CODE (tree00) == MULT_EXPR)
6674 {
6675 tree tree0;
6676 tree0 = fold (build2 (PLUS_EXPR, type, tree01, arg1));
6677 return fold (build2 (PLUS_EXPR, type, tree00, tree0));
6678 }
6679 }
6680 }
6681
6682 bit_rotate:
6683 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
6684 is a rotate of A by C1 bits. */
6685 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
6686 is a rotate of A by B bits. */
6687 {
6688 enum tree_code code0, code1;
6689 code0 = TREE_CODE (arg0);
6690 code1 = TREE_CODE (arg1);
6691 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
6692 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
6693 && operand_equal_p (TREE_OPERAND (arg0, 0),
6694 TREE_OPERAND (arg1, 0), 0)
6695 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6696 {
6697 tree tree01, tree11;
6698 enum tree_code code01, code11;
6699
6700 tree01 = TREE_OPERAND (arg0, 1);
6701 tree11 = TREE_OPERAND (arg1, 1);
6702 STRIP_NOPS (tree01);
6703 STRIP_NOPS (tree11);
6704 code01 = TREE_CODE (tree01);
6705 code11 = TREE_CODE (tree11);
6706 if (code01 == INTEGER_CST
6707 && code11 == INTEGER_CST
6708 && TREE_INT_CST_HIGH (tree01) == 0
6709 && TREE_INT_CST_HIGH (tree11) == 0
6710 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
6711 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
6712 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
6713 code0 == LSHIFT_EXPR ? tree01 : tree11);
6714 else if (code11 == MINUS_EXPR)
6715 {
6716 tree tree110, tree111;
6717 tree110 = TREE_OPERAND (tree11, 0);
6718 tree111 = TREE_OPERAND (tree11, 1);
6719 STRIP_NOPS (tree110);
6720 STRIP_NOPS (tree111);
6721 if (TREE_CODE (tree110) == INTEGER_CST
6722 && 0 == compare_tree_int (tree110,
6723 TYPE_PRECISION
6724 (TREE_TYPE (TREE_OPERAND
6725 (arg0, 0))))
6726 && operand_equal_p (tree01, tree111, 0))
6727 return build2 ((code0 == LSHIFT_EXPR
6728 ? LROTATE_EXPR
6729 : RROTATE_EXPR),
6730 type, TREE_OPERAND (arg0, 0), tree01);
6731 }
6732 else if (code01 == MINUS_EXPR)
6733 {
6734 tree tree010, tree011;
6735 tree010 = TREE_OPERAND (tree01, 0);
6736 tree011 = TREE_OPERAND (tree01, 1);
6737 STRIP_NOPS (tree010);
6738 STRIP_NOPS (tree011);
6739 if (TREE_CODE (tree010) == INTEGER_CST
6740 && 0 == compare_tree_int (tree010,
6741 TYPE_PRECISION
6742 (TREE_TYPE (TREE_OPERAND
6743 (arg0, 0))))
6744 && operand_equal_p (tree11, tree011, 0))
6745 return build2 ((code0 != LSHIFT_EXPR
6746 ? LROTATE_EXPR
6747 : RROTATE_EXPR),
6748 type, TREE_OPERAND (arg0, 0), tree11);
6749 }
6750 }
6751 }
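/* For example, for a 32-bit unsigned X, (X << 3) + (X >> 29) is
   recognized above as a rotate left by 3 bits, as is
   (X << B) + (X >> (32 - B)). */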
6752
6753 associate:
6754 /* In most languages, we can't associate operations on floats through
6755 parentheses. Rather than remember where the parentheses were, we
6756 don't associate floats at all, unless the user has specified
6757 -funsafe-math-optimizations. */
6758
6759 if (! wins
6760 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
6761 {
6762 tree var0, con0, lit0, minus_lit0;
6763 tree var1, con1, lit1, minus_lit1;
6764
6765 /* Split both trees into variables, constants, and literals. Then
6766 associate each group together, the constants with literals,
6767 then the result with variables. This increases the chances of
6768 literals being recombined later and of generating relocatable
6769 expressions for the sum of a constant and literal. */
6770 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
6771 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
6772 code == MINUS_EXPR);
6773
6774 /* Only do something if we found more than two objects. Otherwise,
6775 nothing has changed and we risk infinite recursion. */
6776 if (2 < ((var0 != 0) + (var1 != 0)
6777 + (con0 != 0) + (con1 != 0)
6778 + (lit0 != 0) + (lit1 != 0)
6779 + (minus_lit0 != 0) + (minus_lit1 != 0)))
6780 {
6781 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
6782 if (code == MINUS_EXPR)
6783 code = PLUS_EXPR;
6784
6785 var0 = associate_trees (var0, var1, code, type);
6786 con0 = associate_trees (con0, con1, code, type);
6787 lit0 = associate_trees (lit0, lit1, code, type);
6788 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
6789
6790 /* Preserve the MINUS_EXPR if the negative part of the literal is
6791 greater than the positive part. Otherwise, the multiplicative
6792 folding code (i.e. extract_muldiv) may be fooled in case
6793 unsigned constants are subtracted, like in the following
6794 example: ((X*2 + 4) - 8U)/2. */
6795 if (minus_lit0 && lit0)
6796 {
6797 if (TREE_CODE (lit0) == INTEGER_CST
6798 && TREE_CODE (minus_lit0) == INTEGER_CST
6799 && tree_int_cst_lt (lit0, minus_lit0))
6800 {
6801 minus_lit0 = associate_trees (minus_lit0, lit0,
6802 MINUS_EXPR, type);
6803 lit0 = 0;
6804 }
6805 else
6806 {
6807 lit0 = associate_trees (lit0, minus_lit0,
6808 MINUS_EXPR, type);
6809 minus_lit0 = 0;
6810 }
6811 }
6812 if (minus_lit0)
6813 {
6814 if (con0 == 0)
6815 return fold_convert (type,
6816 associate_trees (var0, minus_lit0,
6817 MINUS_EXPR, type));
6818 else
6819 {
6820 con0 = associate_trees (con0, minus_lit0,
6821 MINUS_EXPR, type);
6822 return fold_convert (type,
6823 associate_trees (var0, con0,
6824 PLUS_EXPR, type));
6825 }
6826 }
6827
6828 con0 = associate_trees (con0, lit0, code, type);
6829 return fold_convert (type, associate_trees (var0, con0,
6830 code, type));
6831 }
6832 }
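/* For example, (X + 3) + (Y + 5) splits into the variables X, Y and
   the literals 3, 5, which reassociate above into (X + Y) + 8. */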
6833
6834 binary:
6835 if (wins)
6836 t1 = const_binop (code, arg0, arg1, 0);
6837 if (t1 != NULL_TREE)
6838 {
6839 /* The return value should always have
6840 the same type as the original expression. */
6841 if (TREE_TYPE (t1) != type)
6842 t1 = fold_convert (type, t1);
6843
6844 return t1;
6845 }
6846 return t;
6847
6848 case MINUS_EXPR:
6849 /* A - (-B) -> A + B */
6850 if (TREE_CODE (arg1) == NEGATE_EXPR)
6851 return fold (build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6852 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
6853 if (TREE_CODE (arg0) == NEGATE_EXPR
6854 && (FLOAT_TYPE_P (type)
6855 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
6856 && negate_expr_p (arg1)
6857 && reorder_operands_p (arg0, arg1))
6858 return fold (build2 (MINUS_EXPR, type, negate_expr (arg1),
6859 TREE_OPERAND (arg0, 0)));
6860
6861 if (! FLOAT_TYPE_P (type))
6862 {
6863 if (! wins && integer_zerop (arg0))
6864 return negate_expr (fold_convert (type, arg1));
6865 if (integer_zerop (arg1))
6866 return non_lvalue (fold_convert (type, arg0));
6867
6868 /* Fold A - (A & B) into ~B & A. */
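/* Each bit of A & B is also set in A, so the subtraction merely
   clears those bits; e.g. 13 - (13 & 7) = 13 - 5 = 8 = 13 & ~7.  */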
6869 if (!TREE_SIDE_EFFECTS (arg0)
6870 && TREE_CODE (arg1) == BIT_AND_EXPR)
6871 {
6872 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
6873 return fold (build2 (BIT_AND_EXPR, type,
6874 fold (build1 (BIT_NOT_EXPR, type,
6875 TREE_OPERAND (arg1, 0))),
6876 arg0));
6877 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
6878 return fold (build2 (BIT_AND_EXPR, type,
6879 fold (build1 (BIT_NOT_EXPR, type,
6880 TREE_OPERAND (arg1, 1))),
6881 arg0));
6882 }
6883
6884 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
6885 any power of 2 minus 1. */
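/* E.g. with A = 13 and B = 7: (13 & ~7) - (13 & 7) = 8 - 5 = 3,
   and likewise (13 ^ 7) - 7 = 10 - 7 = 3.  */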
6886 if (TREE_CODE (arg0) == BIT_AND_EXPR
6887 && TREE_CODE (arg1) == BIT_AND_EXPR
6888 && operand_equal_p (TREE_OPERAND (arg0, 0),
6889 TREE_OPERAND (arg1, 0), 0))
6890 {
6891 tree mask0 = TREE_OPERAND (arg0, 1);
6892 tree mask1 = TREE_OPERAND (arg1, 1);
6893 tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
6894
6895 if (operand_equal_p (tem, mask1, 0))
6896 {
6897 tem = fold (build2 (BIT_XOR_EXPR, type,
6898 TREE_OPERAND (arg0, 0), mask1));
6899 return fold (build2 (MINUS_EXPR, type, tem, mask1));
6900 }
6901 }
6902 }
6903
6904 /* See if ARG1 is zero and X - ARG1 reduces to X. */
6905 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
6906 return non_lvalue (fold_convert (type, arg0));
6907
6908 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
6909 ARG0 is zero and X + ARG0 reduces to X, since that would mean
6910 (-ARG1 + ARG0) reduces to -ARG1. */
6911 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6912 return negate_expr (fold_convert (type, arg1));
6913
6914 /* Fold &x - &x. This can happen from &x.foo - &x.
6915 This is unsafe for certain floats even in non-IEEE formats.
6916 In IEEE, it is unsafe because it gives the wrong result for NaNs.
6917 Also note that operand_equal_p is always false if an operand
6918 is volatile. */
6919
6920 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
6921 && operand_equal_p (arg0, arg1, 0))
6922 return fold_convert (type, integer_zero_node);
6923
6924 /* A - B -> A + (-B) if B is easily negatable. */
6925 if (!wins && negate_expr_p (arg1)
6926 && (FLOAT_TYPE_P (type)
6927 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
6928 return fold (build2 (PLUS_EXPR, type, arg0, negate_expr (arg1)));
6929
6930 if (TREE_CODE (arg0) == MULT_EXPR
6931 && TREE_CODE (arg1) == MULT_EXPR
6932 && (INTEGRAL_TYPE_P (type) || flag_unsafe_math_optimizations))
6933 {
6934 /* (A * C) - (B * C) -> (A-B) * C. */
6935 if (operand_equal_p (TREE_OPERAND (arg0, 1),
6936 TREE_OPERAND (arg1, 1), 0))
6937 return fold (build2 (MULT_EXPR, type,
6938 fold (build2 (MINUS_EXPR, type,
6939 TREE_OPERAND (arg0, 0),
6940 TREE_OPERAND (arg1, 0))),
6941 TREE_OPERAND (arg0, 1)));
6942 /* (A * C1) - (A * C2) -> A * (C1-C2). */
6943 if (operand_equal_p (TREE_OPERAND (arg0, 0),
6944 TREE_OPERAND (arg1, 0), 0))
6945 return fold (build2 (MULT_EXPR, type,
6946 TREE_OPERAND (arg0, 0),
6947 fold (build2 (MINUS_EXPR, type,
6948 TREE_OPERAND (arg0, 1),
6949 TREE_OPERAND (arg1, 1)))));
6950 }
6951
6952 goto associate;
6953
6954 case MULT_EXPR:
6955 /* (-A) * (-B) -> A * B */
6956 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6957 return fold (build2 (MULT_EXPR, type,
6958 TREE_OPERAND (arg0, 0),
6959 negate_expr (arg1)));
6960 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6961 return fold (build2 (MULT_EXPR, type,
6962 negate_expr (arg0),
6963 TREE_OPERAND (arg1, 0)));
6964
6965 if (! FLOAT_TYPE_P (type))
6966 {
6967 if (integer_zerop (arg1))
6968 return omit_one_operand (type, arg1, arg0);
6969 if (integer_onep (arg1))
6970 return non_lvalue (fold_convert (type, arg0));
6971
6972 /* (a * (1 << b)) is (a << b) */
6973 if (TREE_CODE (arg1) == LSHIFT_EXPR
6974 && integer_onep (TREE_OPERAND (arg1, 0)))
6975 return fold (build2 (LSHIFT_EXPR, type, arg0,
6976 TREE_OPERAND (arg1, 1)));
6977 if (TREE_CODE (arg0) == LSHIFT_EXPR
6978 && integer_onep (TREE_OPERAND (arg0, 0)))
6979 return fold (build2 (LSHIFT_EXPR, type, arg1,
6980 TREE_OPERAND (arg0, 1)));
6981
6982 if (TREE_CODE (arg1) == INTEGER_CST
6983 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
6984 fold_convert (type, arg1),
6985 code, NULL_TREE)))
6986 return fold_convert (type, tem);
6987
6988 }
6989 else
6990 {
6991 /* Maybe fold x * 0 to 0. The expressions aren't the same
6992 when x is NaN, since x * 0 is also NaN. Nor are they the
6993 same in modes with signed zeros, since multiplying a
6994 negative value by 0 gives -0, not +0. */
6995 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
6996 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
6997 && real_zerop (arg1))
6998 return omit_one_operand (type, arg1, arg0);
6999 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7000 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7001 && real_onep (arg1))
7002 return non_lvalue (fold_convert (type, arg0));
7003
7004 /* Transform x * -1.0 into -x. */
7005 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7006 && real_minus_onep (arg1))
7007 return fold_convert (type, negate_expr (arg0));
7008
7009 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7010 if (flag_unsafe_math_optimizations
7011 && TREE_CODE (arg0) == RDIV_EXPR
7012 && TREE_CODE (arg1) == REAL_CST
7013 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7014 {
7015 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7016 arg1, 0);
7017 if (tem)
7018 return fold (build2 (RDIV_EXPR, type, tem,
7019 TREE_OPERAND (arg0, 1)));
7020 }
7021
7022 if (flag_unsafe_math_optimizations)
7023 {
7024 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7025 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7026
7027 /* Optimizations of root(...)*root(...). */
7028 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7029 {
7030 tree rootfn, arg, arglist;
7031 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7032 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7033
7034 /* Optimize sqrt(x)*sqrt(x) as x. */
7035 if (BUILTIN_SQRT_P (fcode0)
7036 && operand_equal_p (arg00, arg10, 0)
7037 && ! HONOR_SNANS (TYPE_MODE (type)))
7038 return arg00;
7039
7040 /* Optimize root(x)*root(y) as root(x*y). */
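/* For sqrt this assumes x and y are nonnegative, and even then the
   rounding of the two forms can differ, which is why this whole
   block is guarded by flag_unsafe_math_optimizations.  */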
7041 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7042 arg = fold (build2 (MULT_EXPR, type, arg00, arg10));
7043 arglist = build_tree_list (NULL_TREE, arg);
7044 return build_function_call_expr (rootfn, arglist);
7045 }
7046
7047 /* Optimize expN(x)*expN(y) as expN(x+y). */
7048 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7049 {
7050 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7051 tree arg = build2 (PLUS_EXPR, type,
7052 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7053 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7054 tree arglist = build_tree_list (NULL_TREE, fold (arg));
7055 return build_function_call_expr (expfn, arglist);
7056 }
7057
7058 /* Optimizations of pow(...)*pow(...). */
7059 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7060 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7061 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7062 {
7063 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7064 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7065 1)));
7066 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7067 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7068 1)));
7069
7070 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7071 if (operand_equal_p (arg01, arg11, 0))
7072 {
7073 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7074 tree arg = build2 (MULT_EXPR, type, arg00, arg10);
7075 tree arglist = tree_cons (NULL_TREE, fold (arg),
7076 build_tree_list (NULL_TREE,
7077 arg01));
7078 return build_function_call_expr (powfn, arglist);
7079 }
7080
7081 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7082 if (operand_equal_p (arg00, arg10, 0))
7083 {
7084 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7085 tree arg = fold (build2 (PLUS_EXPR, type, arg01, arg11));
7086 tree arglist = tree_cons (NULL_TREE, arg00,
7087 build_tree_list (NULL_TREE,
7088 arg));
7089 return build_function_call_expr (powfn, arglist);
7090 }
7091 }
7092
7093 /* Optimize tan(x)*cos(x) as sin(x). */
7094 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7095 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7096 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7097 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
7098 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
7099 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
7100 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7101 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7102 {
7103 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
7104
7105 if (sinfn != NULL_TREE)
7106 return build_function_call_expr (sinfn,
7107 TREE_OPERAND (arg0, 1));
7108 }
7109
7110 /* Optimize x*pow(x,c) as pow(x,c+1). */
7111 if (fcode1 == BUILT_IN_POW
7112 || fcode1 == BUILT_IN_POWF
7113 || fcode1 == BUILT_IN_POWL)
7114 {
7115 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7116 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7117 1)));
7118 if (TREE_CODE (arg11) == REAL_CST
7119 && ! TREE_CONSTANT_OVERFLOW (arg11)
7120 && operand_equal_p (arg0, arg10, 0))
7121 {
7122 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7123 REAL_VALUE_TYPE c;
7124 tree arg, arglist;
7125
7126 c = TREE_REAL_CST (arg11);
7127 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7128 arg = build_real (type, c);
7129 arglist = build_tree_list (NULL_TREE, arg);
7130 arglist = tree_cons (NULL_TREE, arg0, arglist);
7131 return build_function_call_expr (powfn, arglist);
7132 }
7133 }
7134
7135 /* Optimize pow(x,c)*x as pow(x,c+1). */
7136 if (fcode0 == BUILT_IN_POW
7137 || fcode0 == BUILT_IN_POWF
7138 || fcode0 == BUILT_IN_POWL)
7139 {
7140 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7141 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7142 1)));
7143 if (TREE_CODE (arg01) == REAL_CST
7144 && ! TREE_CONSTANT_OVERFLOW (arg01)
7145 && operand_equal_p (arg1, arg00, 0))
7146 {
7147 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7148 REAL_VALUE_TYPE c;
7149 tree arg, arglist;
7150
7151 c = TREE_REAL_CST (arg01);
7152 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7153 arg = build_real (type, c);
7154 arglist = build_tree_list (NULL_TREE, arg);
7155 arglist = tree_cons (NULL_TREE, arg1, arglist);
7156 return build_function_call_expr (powfn, arglist);
7157 }
7158 }
7159
7160 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
7161 if (! optimize_size
7162 && operand_equal_p (arg0, arg1, 0))
7163 {
7164 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7165
7166 if (powfn)
7167 {
7168 tree arg = build_real (type, dconst2);
7169 tree arglist = build_tree_list (NULL_TREE, arg);
7170 arglist = tree_cons (NULL_TREE, arg0, arglist);
7171 return build_function_call_expr (powfn, arglist);
7172 }
7173 }
7174 }
7175 }
7176 goto associate;
7177
7178 case BIT_IOR_EXPR:
7179 bit_ior:
7180 if (integer_all_onesp (arg1))
7181 return omit_one_operand (type, arg1, arg0);
7182 if (integer_zerop (arg1))
7183 return non_lvalue (fold_convert (type, arg0));
7184 if (operand_equal_p (arg0, arg1, 0))
7185 return non_lvalue (fold_convert (type, arg0));
7186 t1 = distribute_bit_expr (code, type, arg0, arg1);
7187 if (t1 != NULL_TREE)
7188 return t1;
7189
7190 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
7191
7192 This results in more efficient code for machines without a NAND
7193 instruction. Combine will canonicalize to the first form
7194 which will allow use of NAND instructions provided by the
7195 backend if they exist. */
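/* The rewrite is just De Morgan's law: ~a | ~b == ~(a & b).  */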
7196 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7197 && TREE_CODE (arg1) == BIT_NOT_EXPR)
7198 {
7199 return fold (build1 (BIT_NOT_EXPR, type,
7200 build2 (BIT_AND_EXPR, type,
7201 TREE_OPERAND (arg0, 0),
7202 TREE_OPERAND (arg1, 0))));
7203 }
7204
7205 /* See if this can be simplified into a rotate first. If that
7206 is unsuccessful, continue in the association code.  */
7207 goto bit_rotate;
7208
7209 case BIT_XOR_EXPR:
7210 if (integer_zerop (arg1))
7211 return non_lvalue (fold_convert (type, arg0));
7212 if (integer_all_onesp (arg1))
7213 return fold (build1 (BIT_NOT_EXPR, type, arg0));
7214 if (operand_equal_p (arg0, arg1, 0))
7215 return omit_one_operand (type, integer_zero_node, arg0);
7216
7217 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
7218 with a constant, and the two constants have no bits in common,
7219 we should treat this as a BIT_IOR_EXPR since this may produce more
7220 simplifications. */
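/* E.g. (a & 0x0f) ^ (b & 0xf0) equals (a & 0x0f) | (b & 0xf0):
   no bit position can be set in both operands, so XOR and IOR
   agree.  */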
7221 if (TREE_CODE (arg0) == BIT_AND_EXPR
7222 && TREE_CODE (arg1) == BIT_AND_EXPR
7223 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7224 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7225 && integer_zerop (const_binop (BIT_AND_EXPR,
7226 TREE_OPERAND (arg0, 1),
7227 TREE_OPERAND (arg1, 1), 0)))
7228 {
7229 code = BIT_IOR_EXPR;
7230 goto bit_ior;
7231 }
7232
7233 /* See if this can be simplified into a rotate first. If that
7234 is unsuccessful, continue in the association code.  */
7235 goto bit_rotate;
7236
7237 case BIT_AND_EXPR:
7238 if (integer_all_onesp (arg1))
7239 return non_lvalue (fold_convert (type, arg0));
7240 if (integer_zerop (arg1))
7241 return omit_one_operand (type, arg1, arg0);
7242 if (operand_equal_p (arg0, arg1, 0))
7243 return non_lvalue (fold_convert (type, arg0));
7244 t1 = distribute_bit_expr (code, type, arg0, arg1);
7245 if (t1 != NULL_TREE)
7246 return t1;
7247 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
7248 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
7249 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7250 {
7251 unsigned int prec
7252 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
7253
7254 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
7255 && (~TREE_INT_CST_LOW (arg1)
7256 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
7257 return fold_convert (type, TREE_OPERAND (arg0, 0));
7258 }
7259
7260 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
7261
7262 This results in more efficient code for machines without a NOR
7263 instruction. Combine will canonicalize to the first form
7264 which will allow use of NOR instructions provided by the
7265 backend if they exist. */
7266 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7267 && TREE_CODE (arg1) == BIT_NOT_EXPR)
7268 {
7269 return fold (build1 (BIT_NOT_EXPR, type,
7270 build2 (BIT_IOR_EXPR, type,
7271 TREE_OPERAND (arg0, 0),
7272 TREE_OPERAND (arg1, 0))));
7273 }
7274
7275 goto associate;
7276
7277 case RDIV_EXPR:
7278 /* Don't touch a floating-point divide by zero unless the mode
7279 of the constant can represent infinity. */
7280 if (TREE_CODE (arg1) == REAL_CST
7281 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
7282 && real_zerop (arg1))
7283 return t;
7284
7285 /* (-A) / (-B) -> A / B */
7286 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7287 return fold (build2 (RDIV_EXPR, type,
7288 TREE_OPERAND (arg0, 0),
7289 negate_expr (arg1)));
7290 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7291 return fold (build2 (RDIV_EXPR, type,
7292 negate_expr (arg0),
7293 TREE_OPERAND (arg1, 0)));
7294
7295 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
7296 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7297 && real_onep (arg1))
7298 return non_lvalue (fold_convert (type, arg0));
7299
7300 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
7301 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7302 && real_minus_onep (arg1))
7303 return non_lvalue (fold_convert (type, negate_expr (arg0)));
7304
7305 /* If ARG1 is a constant, we can convert this to a multiply by the
7306 reciprocal. This does not have the same rounding properties,
7307 so only do this if -funsafe-math-optimizations. We can actually
7308 always safely do it if ARG1 is a power of two, but it's hard to
7309 tell if it is or not in a portable manner. */
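/* E.g. x / 4.0 becomes x * 0.25, which is exact because 0.25 is
   representable in binary floating point; 1/3.0 is not, so x / 3.0
   is only rewritten under -funsafe-math-optimizations.  */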
7310 if (TREE_CODE (arg1) == REAL_CST)
7311 {
7312 if (flag_unsafe_math_optimizations
7313 && 0 != (tem = const_binop (code, build_real (type, dconst1),
7314 arg1, 0)))
7315 return fold (build2 (MULT_EXPR, type, arg0, tem));
7316 /* Find the reciprocal if optimizing and the result is exact. */
7317 if (optimize)
7318 {
7319 REAL_VALUE_TYPE r;
7320 r = TREE_REAL_CST (arg1);
7321 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
7322 {
7323 tem = build_real (type, r);
7324 return fold (build2 (MULT_EXPR, type, arg0, tem));
7325 }
7326 }
7327 }
7328 /* Convert A/B/C to A/(B*C). */
7329 if (flag_unsafe_math_optimizations
7330 && TREE_CODE (arg0) == RDIV_EXPR)
7331 return fold (build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
7332 fold (build2 (MULT_EXPR, type,
7333 TREE_OPERAND (arg0, 1), arg1))));
7334
7335 /* Convert A/(B/C) to (A/B)*C. */
7336 if (flag_unsafe_math_optimizations
7337 && TREE_CODE (arg1) == RDIV_EXPR)
7338 return fold (build2 (MULT_EXPR, type,
7339 fold (build2 (RDIV_EXPR, type, arg0,
7340 TREE_OPERAND (arg1, 0))),
7341 TREE_OPERAND (arg1, 1)));
7342
7343 /* Convert C1/(X*C2) into (C1/C2)/X. */
7344 if (flag_unsafe_math_optimizations
7345 && TREE_CODE (arg1) == MULT_EXPR
7346 && TREE_CODE (arg0) == REAL_CST
7347 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
7348 {
7349 tree tem = const_binop (RDIV_EXPR, arg0,
7350 TREE_OPERAND (arg1, 1), 0);
7351 if (tem)
7352 return fold (build2 (RDIV_EXPR, type, tem,
7353 TREE_OPERAND (arg1, 0)));
7354 }
7355
7356 if (flag_unsafe_math_optimizations)
7357 {
7358 enum built_in_function fcode = builtin_mathfn_code (arg1);
7359 /* Optimize x/expN(y) into x*expN(-y). */
7360 if (BUILTIN_EXPONENT_P (fcode))
7361 {
7362 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7363 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
7364 tree arglist = build_tree_list (NULL_TREE,
7365 fold_convert (type, arg));
7366 arg1 = build_function_call_expr (expfn, arglist);
7367 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7368 }
7369
7370 /* Optimize x/pow(y,z) into x*pow(y,-z). */
7371 if (fcode == BUILT_IN_POW
7372 || fcode == BUILT_IN_POWF
7373 || fcode == BUILT_IN_POWL)
7374 {
7375 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7376 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7377 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
7378 tree neg11 = fold_convert (type, negate_expr (arg11));
7379 tree arglist = tree_cons (NULL_TREE, arg10,
7380 build_tree_list (NULL_TREE, neg11));
7381 arg1 = build_function_call_expr (powfn, arglist);
7382 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7383 }
7384 }
7385
7386 if (flag_unsafe_math_optimizations)
7387 {
7388 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7389 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7390
7391 /* Optimize sin(x)/cos(x) as tan(x). */
7392 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
7393 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
7394 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
7395 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7396 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7397 {
7398 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7399
7400 if (tanfn != NULL_TREE)
7401 return build_function_call_expr (tanfn,
7402 TREE_OPERAND (arg0, 1));
7403 }
7404
7405 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
7406 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
7407 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
7408 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
7409 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7410 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7411 {
7412 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7413
7414 if (tanfn != NULL_TREE)
7415 {
7416 tree tmp = TREE_OPERAND (arg0, 1);
7417 tmp = build_function_call_expr (tanfn, tmp);
7418 return fold (build2 (RDIV_EXPR, type,
7419 build_real (type, dconst1), tmp));
7420 }
7421 }
7422
7423 /* Optimize pow(x,c)/x as pow(x,c-1). */
7424 if (fcode0 == BUILT_IN_POW
7425 || fcode0 == BUILT_IN_POWF
7426 || fcode0 == BUILT_IN_POWL)
7427 {
7428 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7429 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
7430 if (TREE_CODE (arg01) == REAL_CST
7431 && ! TREE_CONSTANT_OVERFLOW (arg01)
7432 && operand_equal_p (arg1, arg00, 0))
7433 {
7434 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7435 REAL_VALUE_TYPE c;
7436 tree arg, arglist;
7437
7438 c = TREE_REAL_CST (arg01);
7439 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
7440 arg = build_real (type, c);
7441 arglist = build_tree_list (NULL_TREE, arg);
7442 arglist = tree_cons (NULL_TREE, arg1, arglist);
7443 return build_function_call_expr (powfn, arglist);
7444 }
7445 }
7446 }
7447 goto binary;
7448
7449 case TRUNC_DIV_EXPR:
7450 case ROUND_DIV_EXPR:
7451 case FLOOR_DIV_EXPR:
7452 case CEIL_DIV_EXPR:
7453 case EXACT_DIV_EXPR:
7454 if (integer_onep (arg1))
7455 return non_lvalue (fold_convert (type, arg0));
7456 if (integer_zerop (arg1))
7457 return t;
7458 /* X / -1 is -X. */
7459 if (!TYPE_UNSIGNED (type)
7460 && TREE_CODE (arg1) == INTEGER_CST
7461 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7462 && TREE_INT_CST_HIGH (arg1) == -1)
7463 return fold_convert (type, negate_expr (arg0));
7464
7465 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
7466 operation, EXACT_DIV_EXPR.
7467
7468 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
7469 At one time others generated faster code; it's not clear whether they
7470 still do after the last round of changes to the DIV code in expmed.c.  */
7471 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
7472 && multiple_of_p (type, arg0, arg1))
7473 return fold (build2 (EXACT_DIV_EXPR, type, arg0, arg1));
7474
7475 if (TREE_CODE (arg1) == INTEGER_CST
7476 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7477 code, NULL_TREE)))
7478 return fold_convert (type, tem);
7479
7480 goto binary;
7481
7482 case CEIL_MOD_EXPR:
7483 case FLOOR_MOD_EXPR:
7484 case ROUND_MOD_EXPR:
7485 case TRUNC_MOD_EXPR:
7486 if (integer_onep (arg1))
7487 return omit_one_operand (type, integer_zero_node, arg0);
7488 if (integer_zerop (arg1))
7489 return t;
7490 /* X % -1 is zero. */
7491 if (!TYPE_UNSIGNED (type)
7492 && TREE_CODE (arg1) == INTEGER_CST
7493 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7494 && TREE_INT_CST_HIGH (arg1) == -1)
7495 return omit_one_operand (type, integer_zero_node, arg0);
7496
7497 if (TREE_CODE (arg1) == INTEGER_CST
7498 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7499 code, NULL_TREE)))
7500 return fold_convert (type, tem);
7501
7502 goto binary;
7503
7504 case LROTATE_EXPR:
7505 case RROTATE_EXPR:
7506 if (integer_all_onesp (arg0))
7507 return omit_one_operand (type, arg0, arg1);
7508 goto shift;
7509
7510 case RSHIFT_EXPR:
7511 /* Optimize -1 >> x for arithmetic right shifts. */
7512 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
7513 return omit_one_operand (type, arg0, arg1);
7514 /* ... fall through ... */
7515
7516 case LSHIFT_EXPR:
7517 shift:
7518 if (integer_zerop (arg1))
7519 return non_lvalue (fold_convert (type, arg0));
7520 if (integer_zerop (arg0))
7521 return omit_one_operand (type, arg0, arg1);
7522
7523 /* Since negative shift count is not well-defined,
7524 don't try to compute it in the compiler. */
7525 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
7526 return t;
7527 /* Rewrite an LROTATE_EXPR by a constant into an
7528 RROTATE_EXPR by a new constant. */
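/* E.g. in a 32-bit mode, rotating left by 3 becomes rotating
   right by 32 - 3 = 29.  */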
7529 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
7530 {
7531 tree tem = build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0);
7532 tem = fold_convert (TREE_TYPE (arg1), tem);
7533 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
7534 return fold (build2 (RROTATE_EXPR, type, arg0, tem));
7535 }
7536
7537 /* If we have a rotate of a bit operation with the rotate count and
7538 the second operand of the bit operation both constant,
7539 permute the two operations. */
7540 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7541 && (TREE_CODE (arg0) == BIT_AND_EXPR
7542 || TREE_CODE (arg0) == BIT_IOR_EXPR
7543 || TREE_CODE (arg0) == BIT_XOR_EXPR)
7544 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7545 return fold (build2 (TREE_CODE (arg0), type,
7546 fold (build2 (code, type,
7547 TREE_OPERAND (arg0, 0), arg1)),
7548 fold (build2 (code, type,
7549 TREE_OPERAND (arg0, 1), arg1))));
7550
7551 /* Two consecutive rotates adding up to the width of the mode can
7552 be ignored. */
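/* E.g. (x >>rotate 10) >>rotate 22 in a 32-bit mode is just x.  */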
7553 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7554 && TREE_CODE (arg0) == RROTATE_EXPR
7555 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7556 && TREE_INT_CST_HIGH (arg1) == 0
7557 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
7558 && ((TREE_INT_CST_LOW (arg1)
7559 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
7560 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
7561 return TREE_OPERAND (arg0, 0);
7562
7563 goto binary;
7564
7565 case MIN_EXPR:
7566 if (operand_equal_p (arg0, arg1, 0))
7567 return omit_one_operand (type, arg0, arg1);
7568 if (INTEGRAL_TYPE_P (type)
7569 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
7570 return omit_one_operand (type, arg1, arg0);
7571 goto associate;
7572
7573 case MAX_EXPR:
7574 if (operand_equal_p (arg0, arg1, 0))
7575 return omit_one_operand (type, arg0, arg1);
7576 if (INTEGRAL_TYPE_P (type)
7577 && TYPE_MAX_VALUE (type)
7578 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
7579 return omit_one_operand (type, arg1, arg0);
7580 goto associate;
7581
7582 case TRUTH_NOT_EXPR:
7583 /* The argument to invert_truthvalue must have Boolean type. */
7584 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7585 arg0 = fold_convert (boolean_type_node, arg0);
7586
7587 /* Note that the operand of this must be an int
7588 and its values must be 0 or 1.
7589 ("true" is a fixed value perhaps depending on the language,
7590 but we don't handle values other than 1 correctly yet.) */
7591 tem = invert_truthvalue (arg0);
7592 /* Avoid infinite recursion. */
7593 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7594 {
7595 tem = fold_single_bit_test (code, arg0, arg1, type);
7596 if (tem)
7597 return tem;
7598 return t;
7599 }
7600 return fold_convert (type, tem);
7601
7602 case TRUTH_ANDIF_EXPR:
7603 /* Note that the operands of this must be ints
7604 and their values must be 0 or 1.
7605 ("true" is a fixed value perhaps depending on the language.) */
7606 /* If first arg is constant zero, return it. */
7607 if (integer_zerop (arg0))
7608 return fold_convert (type, arg0);
7609 case TRUTH_AND_EXPR:
7610 /* If either arg is constant true, drop it. */
7611 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7612 return non_lvalue (fold_convert (type, arg1));
7613 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
7614 /* Preserve sequence points. */
7615 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7616 return non_lvalue (fold_convert (type, arg0));
7617 /* If second arg is constant zero, result is zero, but first arg
7618 must be evaluated. */
7619 if (integer_zerop (arg1))
7620 return omit_one_operand (type, arg1, arg0);
7621 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
7622 case will be handled here. */
7623 if (integer_zerop (arg0))
7624 return omit_one_operand (type, arg0, arg1);
7625
7626 truth_andor:
7627 /* We only do these simplifications if we are optimizing. */
7628 if (!optimize)
7629 return t;
7630
7631 /* Check for things like (A || B) && (A || C). We can convert this
7632 to A || (B && C). Note that either operator can be any of the four
7633 truth and/or operations and the transformation will still be
7634 valid. Also note that we only care about order for the
7635 ANDIF and ORIF operators. If B contains side effects, this
7636 might change the truth-value of A. */
7637 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7638 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7639 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7640 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7641 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7642 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7643 {
7644 tree a00 = TREE_OPERAND (arg0, 0);
7645 tree a01 = TREE_OPERAND (arg0, 1);
7646 tree a10 = TREE_OPERAND (arg1, 0);
7647 tree a11 = TREE_OPERAND (arg1, 1);
7648 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7649 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7650 && (code == TRUTH_AND_EXPR
7651 || code == TRUTH_OR_EXPR));
7652
7653 if (operand_equal_p (a00, a10, 0))
7654 return fold (build2 (TREE_CODE (arg0), type, a00,
7655 fold (build2 (code, type, a01, a11))));
7656 else if (commutative && operand_equal_p (a00, a11, 0))
7657 return fold (build2 (TREE_CODE (arg0), type, a00,
7658 fold (build2 (code, type, a01, a10))));
7659 else if (commutative && operand_equal_p (a01, a10, 0))
7660 return fold (build2 (TREE_CODE (arg0), type, a01,
7661 fold (build2 (code, type, a00, a11))));
7662
7663 /* This case is tricky because we must either have commutative
7664 operators or else A10 must not have side-effects. */
7665
7666 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
7667 && operand_equal_p (a01, a11, 0))
7668 return fold (build2 (TREE_CODE (arg0), type,
7669 fold (build2 (code, type, a00, a10)),
7670 a01));
7671 }
7672
7673 /* See if we can build a range comparison. */
7674 if (0 != (tem = fold_range_test (t)))
7675 return tem;
7676
7677 /* Check for the possibility of merging component references. If our
7678 lhs is another similar operation, try to merge its rhs with our
7679 rhs. Then try to merge our lhs and rhs. */
7680 if (TREE_CODE (arg0) == code
7681 && 0 != (tem = fold_truthop (code, type,
7682 TREE_OPERAND (arg0, 1), arg1)))
7683 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
7684
7685 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
7686 return tem;
7687
7688 return t;
7689
7690 case TRUTH_ORIF_EXPR:
7691 /* Note that the operands of this must be ints
7692 and their values must be 0 or 1.
7693 ("true" is a fixed value perhaps depending on the language.) */
7694 /* If first arg is constant true, return it. */
7695 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7696 return fold_convert (type, arg0);
7697 case TRUTH_OR_EXPR:
7698 /* If either arg is constant zero, drop it. */
7699 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
7700 return non_lvalue (fold_convert (type, arg1));
7701 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
7702 /* Preserve sequence points. */
7703 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7704 return non_lvalue (fold_convert (type, arg0));
7705 /* If second arg is constant true, result is true, but we must
7706 evaluate first arg. */
7707 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
7708 return omit_one_operand (type, arg1, arg0);
7709 /* Likewise for first arg, but note this only occurs here for
7710 TRUTH_OR_EXPR. */
7711 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7712 return omit_one_operand (type, arg0, arg1);
7713 goto truth_andor;
7714
7715 case TRUTH_XOR_EXPR:
7716 /* If either arg is constant zero, drop it. */
7717 if (integer_zerop (arg0))
7718 return non_lvalue (fold_convert (type, arg1));
7719 if (integer_zerop (arg1))
7720 return non_lvalue (fold_convert (type, arg0));
7721 /* If either arg is constant true, this is a logical inversion. */
7722 if (integer_onep (arg0))
7723 return non_lvalue (fold_convert (type, invert_truthvalue (arg1)));
7724 if (integer_onep (arg1))
7725 return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
7726 /* Identical arguments cancel to zero. */
7727 if (operand_equal_p (arg0, arg1, 0))
7728 return omit_one_operand (type, integer_zero_node, arg0);
7729 return t;
7730
7731 case EQ_EXPR:
7732 case NE_EXPR:
7733 case LT_EXPR:
7734 case GT_EXPR:
7735 case LE_EXPR:
7736 case GE_EXPR:
7737 /* If one arg is a real or integer constant, put it last. */
7738 if (tree_swap_operands_p (arg0, arg1, true))
7739 return fold (build2 (swap_tree_comparison (code), type, arg1, arg0));
7740
7741 /* If this is an equality comparison of the address of a non-weak
7742 object against zero, then we know the result. */
7743 if ((code == EQ_EXPR || code == NE_EXPR)
7744 && TREE_CODE (arg0) == ADDR_EXPR
7745 && DECL_P (TREE_OPERAND (arg0, 0))
7746 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
7747 && integer_zerop (arg1))
7748 return constant_boolean_node (code != EQ_EXPR, type);
7749
7750 /* If this is an equality comparison of the address of two non-weak,
7751 unaliased symbols neither of which are extern (since we do not
7752 have access to attributes for externs), then we know the result. */
7753 if ((code == EQ_EXPR || code == NE_EXPR)
7754 && TREE_CODE (arg0) == ADDR_EXPR
7755 && DECL_P (TREE_OPERAND (arg0, 0))
7756 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
7757 && ! lookup_attribute ("alias",
7758 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
7759 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
7760 && TREE_CODE (arg1) == ADDR_EXPR
7761 && DECL_P (TREE_OPERAND (arg1, 0))
7762 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
7763 && ! lookup_attribute ("alias",
7764 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
7765 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
7766 return constant_boolean_node (operand_equal_p (arg0, arg1, 0)
7767 ? code == EQ_EXPR : code != EQ_EXPR,
7768 type);
7769
7770 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
7771 {
7772 tree targ0 = strip_float_extensions (arg0);
7773 tree targ1 = strip_float_extensions (arg1);
7774 tree newtype = TREE_TYPE (targ0);
7775
7776 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
7777 newtype = TREE_TYPE (targ1);
7778
7779 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
7780 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
7781 return fold (build2 (code, type, fold_convert (newtype, targ0),
7782 fold_convert (newtype, targ1)));
7783
7784 /* (-a) CMP (-b) -> b CMP a */
7785 if (TREE_CODE (arg0) == NEGATE_EXPR
7786 && TREE_CODE (arg1) == NEGATE_EXPR)
7787 return fold (build2 (code, type, TREE_OPERAND (arg1, 0),
7788 TREE_OPERAND (arg0, 0)));
7789
7790 if (TREE_CODE (arg1) == REAL_CST)
7791 {
7792 REAL_VALUE_TYPE cst;
7793 cst = TREE_REAL_CST (arg1);
7794
7795 /* (-a) CMP CST -> a swap(CMP) (-CST) */
7796 if (TREE_CODE (arg0) == NEGATE_EXPR)
7797 return
7798 fold (build2 (swap_tree_comparison (code), type,
7799 TREE_OPERAND (arg0, 0),
7800 build_real (TREE_TYPE (arg1),
7801 REAL_VALUE_NEGATE (cst))));
7802
7803 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
7804 /* a CMP (-0) -> a CMP 0 */
7805 if (REAL_VALUE_MINUS_ZERO (cst))
7806 return fold (build2 (code, type, arg0,
7807 build_real (TREE_TYPE (arg1), dconst0)));
7808
7809 /* x != NaN is always true, other ops are always false. */
7810 if (REAL_VALUE_ISNAN (cst)
7811 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
7812 {
7813 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
7814 return omit_one_operand (type, tem, arg0);
7815 }
7816
7817 /* Fold comparisons against infinity. */
7818 if (REAL_VALUE_ISINF (cst))
7819 {
7820 tem = fold_inf_compare (code, type, arg0, arg1);
7821 if (tem != NULL_TREE)
7822 return tem;
7823 }
7824 }
7825
7826 /* If this is a comparison of a real constant with a PLUS_EXPR
7827 or a MINUS_EXPR of a real constant, we can convert it into a
7828 comparison with a revised real constant as long as no overflow
7829 occurs when unsafe_math_optimizations are enabled. */
7830 if (flag_unsafe_math_optimizations
7831 && TREE_CODE (arg1) == REAL_CST
7832 && (TREE_CODE (arg0) == PLUS_EXPR
7833 || TREE_CODE (arg0) == MINUS_EXPR)
7834 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7835 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7836 ? MINUS_EXPR : PLUS_EXPR,
7837 arg1, TREE_OPERAND (arg0, 1), 0))
7838 && ! TREE_CONSTANT_OVERFLOW (tem))
7839 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
7840
7841 /* Likewise, we can simplify a comparison of a real constant with
7842 a MINUS_EXPR whose first operand is also a real constant, i.e.
7843 (c1 - x) < c2 becomes x > c1-c2. */
7844 if (flag_unsafe_math_optimizations
7845 && TREE_CODE (arg1) == REAL_CST
7846 && TREE_CODE (arg0) == MINUS_EXPR
7847 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
7848 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
7849 arg1, 0))
7850 && ! TREE_CONSTANT_OVERFLOW (tem))
7851 return fold (build2 (swap_tree_comparison (code), type,
7852 TREE_OPERAND (arg0, 1), tem));
7853
7854 /* Fold comparisons against built-in math functions. */
7855 if (TREE_CODE (arg1) == REAL_CST
7856 && flag_unsafe_math_optimizations
7857 && ! flag_errno_math)
7858 {
7859 enum built_in_function fcode = builtin_mathfn_code (arg0);
7860
7861 if (fcode != END_BUILTINS)
7862 {
7863 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
7864 if (tem != NULL_TREE)
7865 return tem;
7866 }
7867 }
7868 }
7869
7870 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
7871 if (TREE_CONSTANT (arg1)
7872 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
7873 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
7874 /* This optimization is invalid for ordered comparisons
7875 if CONST+INCR overflows or if foo+incr might overflow.
7876 This optimization is invalid for floating point due to rounding.
7877 For pointer types we assume overflow doesn't happen. */
7878 && (POINTER_TYPE_P (TREE_TYPE (arg0))
7879 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
7880 && (code == EQ_EXPR || code == NE_EXPR))))
7881 {
7882 tree varop, newconst;
7883
7884 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
7885 {
7886 newconst = fold (build2 (PLUS_EXPR, TREE_TYPE (arg0),
7887 arg1, TREE_OPERAND (arg0, 1)));
7888 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
7889 TREE_OPERAND (arg0, 0),
7890 TREE_OPERAND (arg0, 1));
7891 }
7892 else
7893 {
7894 newconst = fold (build2 (MINUS_EXPR, TREE_TYPE (arg0),
7895 arg1, TREE_OPERAND (arg0, 1)));
7896 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
7897 TREE_OPERAND (arg0, 0),
7898 TREE_OPERAND (arg0, 1));
7899 }
7900
7901
7902 /* If VAROP is a reference to a bitfield, we must mask
7903 the constant by the width of the field. */
7904 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7905 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
7906 && host_integerp (DECL_SIZE (TREE_OPERAND
7907 (TREE_OPERAND (varop, 0), 1)), 1))
7908 {
7909 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
7910 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
7911 tree folded_compare, shift;
7912
7913 /* First check whether the comparison would come out
7914 always the same. If we don't do that we would
7915 change the meaning with the masking. */
7916 folded_compare = fold (build2 (code, type,
7917 TREE_OPERAND (varop, 0), arg1));
7918 if (integer_zerop (folded_compare)
7919 || integer_onep (folded_compare))
7920 return omit_one_operand (type, folded_compare, varop);
7921
7922 shift = build_int_2 (TYPE_PRECISION (TREE_TYPE (varop)) - size,
7923 0);
7924 shift = fold_convert (TREE_TYPE (varop), shift);
7925 newconst = fold (build2 (LSHIFT_EXPR, TREE_TYPE (varop),
7926 newconst, shift));
7927 newconst = fold (build2 (RSHIFT_EXPR, TREE_TYPE (varop),
7928 newconst, shift));
7929 }
7930
7931 return fold (build2 (code, type, varop, newconst));
7932 }
7933
7934 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
7935 This transformation affects the cases which are handled in later
7936 optimizations involving comparisons with non-negative constants. */
7937 if (TREE_CODE (arg1) == INTEGER_CST
7938 && TREE_CODE (arg0) != INTEGER_CST
7939 && tree_int_cst_sgn (arg1) > 0)
7940 {
7941 switch (code)
7942 {
7943 case GE_EXPR:
7944 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7945 return fold (build2 (GT_EXPR, type, arg0, arg1));
7946
7947 case LT_EXPR:
7948 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7949 return fold (build2 (LE_EXPR, type, arg0, arg1));
7950
7951 default:
7952 break;
7953 }
7954 }
7955
7956 /* Comparisons with the highest or lowest possible integer of
7957 the specified size will have known values.
7958
7959 This is quite similar to fold_relational_hi_lo; however, my
7960 attempts to share the code have been nothing but trouble.
7961 I give up for now. */
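/* E.g. for unsigned char X, X > 255 is always false and X <= 255
   is always true, while X >= 255 collapses to X == 255.  */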
7962 {
7963 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
7964
7965 if (TREE_CODE (arg1) == INTEGER_CST
7966 && ! TREE_CONSTANT_OVERFLOW (arg1)
7967 && width <= HOST_BITS_PER_WIDE_INT
7968 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
7969 || POINTER_TYPE_P (TREE_TYPE (arg1))))
7970 {
7971 unsigned HOST_WIDE_INT signed_max;
7972 unsigned HOST_WIDE_INT max, min;
7973
7974 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
7975
7976 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
7977 {
7978 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
7979 min = 0;
7980 }
7981 else
7982 {
7983 max = signed_max;
7984 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
7985 }
7986
7987 if (TREE_INT_CST_HIGH (arg1) == 0
7988 && TREE_INT_CST_LOW (arg1) == max)
7989 switch (code)
7990 {
7991 case GT_EXPR:
7992 return omit_one_operand (type, integer_zero_node, arg0);
7993
7994 case GE_EXPR:
7995 return fold (build2 (EQ_EXPR, type, arg0, arg1));
7996
7997 case LE_EXPR:
7998 return omit_one_operand (type, integer_one_node, arg0);
7999
8000 case LT_EXPR:
8001 return fold (build2 (NE_EXPR, type, arg0, arg1));
8002
8003 /* The GE_EXPR and LT_EXPR cases above are not normally
8004 reached because of previous transformations. */
8005
8006 default:
8007 break;
8008 }
8009 else if (TREE_INT_CST_HIGH (arg1) == 0
8010 && TREE_INT_CST_LOW (arg1) == max - 1)
8011 switch (code)
8012 {
8013 case GT_EXPR:
8014 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
8015 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8016 case LE_EXPR:
8017 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
8018 return fold (build2 (NE_EXPR, type, arg0, arg1));
8019 default:
8020 break;
8021 }
8022 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
8023 && TREE_INT_CST_LOW (arg1) == min)
8024 switch (code)
8025 {
8026 case LT_EXPR:
8027 return omit_one_operand (type, integer_zero_node, arg0);
8028
8029 case LE_EXPR:
8030 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8031
8032 case GE_EXPR:
8033 return omit_one_operand (type, integer_one_node, arg0);
8034
8035 case GT_EXPR:
8036 return fold (build2 (NE_EXPR, type, arg0, arg1));
8037
8038 default:
8039 break;
8040 }
8041 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
8042 && TREE_INT_CST_LOW (arg1) == min + 1)
8043 switch (code)
8044 {
8045 case GE_EXPR:
8046 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8047 return fold (build2 (NE_EXPR, type, arg0, arg1));
8048 case LT_EXPR:
8049 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8050 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8051 default:
8052 break;
8053 }
8054
8055 else if (!in_gimple_form
8056 && TREE_INT_CST_HIGH (arg1) == 0
8057 && TREE_INT_CST_LOW (arg1) == signed_max
8058 && TYPE_UNSIGNED (TREE_TYPE (arg1))
8059 /* signed_type does not work on pointer types. */
8060 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
8061 {
8062 /* The following case also applies to X < signed_max+1
8063 and X >= signed_max+1 because of previous transformations.  */
8064 if (code == LE_EXPR || code == GT_EXPR)
8065 {
8066 tree st0, st1;
8067 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
8068 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
8069 return fold
8070 (build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
8071 type, fold_convert (st0, arg0),
8072 fold_convert (st1, integer_zero_node)));
8073 }
8074 }
8075 }
8076 }
8077
8078 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
8079 a MINUS_EXPR of a constant, we can convert it into a comparison with
8080 a revised constant as long as no overflow occurs. */
8081 if ((code == EQ_EXPR || code == NE_EXPR)
8082 && TREE_CODE (arg1) == INTEGER_CST
8083 && (TREE_CODE (arg0) == PLUS_EXPR
8084 || TREE_CODE (arg0) == MINUS_EXPR)
8085 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8086 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8087 ? MINUS_EXPR : PLUS_EXPR,
8088 arg1, TREE_OPERAND (arg0, 1), 0))
8089 && ! TREE_CONSTANT_OVERFLOW (tem))
8090 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8091
8092 /* Similarly for a NEGATE_EXPR. */
8093 else if ((code == EQ_EXPR || code == NE_EXPR)
8094 && TREE_CODE (arg0) == NEGATE_EXPR
8095 && TREE_CODE (arg1) == INTEGER_CST
8096 && 0 != (tem = negate_expr (arg1))
8097 && TREE_CODE (tem) == INTEGER_CST
8098 && ! TREE_CONSTANT_OVERFLOW (tem))
8099 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8100
8101 /* If we have X - Y == 0, we can convert that to X == Y and similarly
8102 for !=. Don't do this for ordered comparisons due to overflow. */
8103 else if ((code == NE_EXPR || code == EQ_EXPR)
8104 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
8105 return fold (build2 (code, type,
8106 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
8107
8108 /* If we are widening one operand of an integer comparison,
8109 see if the other operand is similarly being widened. Perhaps we
8110 can do the comparison in the narrower type. */
8111 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8112 && TREE_CODE (arg0) == NOP_EXPR
8113 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
8114 && (code == EQ_EXPR || code == NE_EXPR
8115 || TYPE_UNSIGNED (TREE_TYPE (arg0))
8116 == TYPE_UNSIGNED (TREE_TYPE (tem)))
8117 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
8118 && (TREE_TYPE (t1) == TREE_TYPE (tem)
8119 || (TREE_CODE (t1) == INTEGER_CST
8120 && int_fits_type_p (t1, TREE_TYPE (tem)))))
8121 return fold (build2 (code, type, tem,
8122 fold_convert (TREE_TYPE (tem), t1)));
8123
8124 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8125 constant, we can simplify it. */
8126 else if (TREE_CODE (arg1) == INTEGER_CST
8127 && (TREE_CODE (arg0) == MIN_EXPR
8128 || TREE_CODE (arg0) == MAX_EXPR)
8129 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8130 return optimize_minmax_comparison (t);
8131
8132 /* If we are comparing an ABS_EXPR with a constant, we can
8133 convert all the cases into explicit comparisons, but they may
8134 well not be faster than doing the ABS and one comparison.
8135 But ABS (X) <= C is a range comparison, which becomes a subtraction
8136 and a comparison, and is probably faster. */
8137 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8138 && TREE_CODE (arg0) == ABS_EXPR
8139 && ! TREE_SIDE_EFFECTS (arg0)
8140 && (0 != (tem = negate_expr (arg1)))
8141 && TREE_CODE (tem) == INTEGER_CST
8142 && ! TREE_CONSTANT_OVERFLOW (tem))
8143 return fold (build2 (TRUTH_ANDIF_EXPR, type,
8144 build2 (GE_EXPR, type,
8145 TREE_OPERAND (arg0, 0), tem),
8146 build2 (LE_EXPR, type,
8147 TREE_OPERAND (arg0, 0), arg1)));
8148
8149 /* If this is an EQ or NE comparison with zero and ARG0 is
8150 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
8151 two operations, but the latter can be done in one less insn
8152 on machines that have only two-operand insns or on which a
8153 constant cannot be the first operand. */
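/* E.g. ((1 << n) & flags) == 0 becomes ((flags >> n) & 1) == 0;
   both test bit n of flags, but the second form keeps the
   constant 1 as the second operand.  */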
8154 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
8155 && TREE_CODE (arg0) == BIT_AND_EXPR)
8156 {
8157 tree arg00 = TREE_OPERAND (arg0, 0);
8158 tree arg01 = TREE_OPERAND (arg0, 1);
8159 if (TREE_CODE (arg00) == LSHIFT_EXPR
8160 && integer_onep (TREE_OPERAND (arg00, 0)))
8161 return
8162 fold (build2 (code, type,
8163 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8164 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
8165 arg01, TREE_OPERAND (arg00, 1)),
8166 fold_convert (TREE_TYPE (arg0),
8167 integer_one_node)),
8168 arg1));
8169 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
8170 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
8171 return
8172 fold (build2 (code, type,
8173 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8174 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
8175 arg00, TREE_OPERAND (arg01, 1)),
8176 fold_convert (TREE_TYPE (arg0),
8177 integer_one_node)),
8178 arg1));
8179 }
8180
8181 /* If this is an NE or EQ comparison of zero against the result of a
8182 signed MOD operation whose second operand is a power of 2, make
8183 the MOD operation unsigned since it is simpler and equivalent. */
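/* E.g. X % 4 == 0 for signed X becomes (unsigned) X % 4 == 0;
   divisibility by a power of 2 depends only on the low-order
   bits, which the two's complement representation preserves.  */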
8184 if ((code == NE_EXPR || code == EQ_EXPR)
8185 && integer_zerop (arg1)
8186 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
8187 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
8188 || TREE_CODE (arg0) == CEIL_MOD_EXPR
8189 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
8190 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
8191 && integer_pow2p (TREE_OPERAND (arg0, 1)))
8192 {
8193 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
8194 tree newmod = build2 (TREE_CODE (arg0), newtype,
8195 fold_convert (newtype,
8196 TREE_OPERAND (arg0, 0)),
8197 fold_convert (newtype,
8198 TREE_OPERAND (arg0, 1)));
8199
8200 return build2 (code, type, newmod, fold_convert (newtype, arg1));
8201 }
8202
8203 /* If this is an NE comparison of zero with an AND of one, remove the
8204 comparison since the AND will give the correct value. */
8205 if (code == NE_EXPR && integer_zerop (arg1)
8206 && TREE_CODE (arg0) == BIT_AND_EXPR
8207 && integer_onep (TREE_OPERAND (arg0, 1)))
8208 return fold_convert (type, arg0);
8209
8210 /* If we have (A & C) == C where C is a power of 2, convert this into
8211 (A & C) != 0. Similarly for NE_EXPR. */
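/* E.g. with C == 8, A & 8 is either 0 or 8, so (A & 8) == 8 is
   equivalent to (A & 8) != 0.  */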
8212 if ((code == EQ_EXPR || code == NE_EXPR)
8213 && TREE_CODE (arg0) == BIT_AND_EXPR
8214 && integer_pow2p (TREE_OPERAND (arg0, 1))
8215 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
8216 return fold (build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
8217 arg0, integer_zero_node));
8218
8219 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
8220 2, then fold the expression into shifts and logical operations. */
8221 tem = fold_single_bit_test (code, arg0, arg1, type);
8222 if (tem)
8223 return tem;
8224
8225 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
8226 Similarly for NE_EXPR. */
8227 if ((code == EQ_EXPR || code == NE_EXPR)
8228 && TREE_CODE (arg0) == BIT_AND_EXPR
8229 && TREE_CODE (arg1) == INTEGER_CST
8230 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8231 {
8232 tree dandnotc
8233 = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8234 arg1, build1 (BIT_NOT_EXPR,
8235 TREE_TYPE (TREE_OPERAND (arg0, 1)),
8236 TREE_OPERAND (arg0, 1))));
8237 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
8238 if (integer_nonzerop (dandnotc))
8239 return omit_one_operand (type, rslt, arg0);
8240 }
8241
8242 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
8243 Similarly for NE_EXPR. */
8244 if ((code == EQ_EXPR || code == NE_EXPR)
8245 && TREE_CODE (arg0) == BIT_IOR_EXPR
8246 && TREE_CODE (arg1) == INTEGER_CST
8247 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8248 {
8249 tree candnotd
8250 = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8251 TREE_OPERAND (arg0, 1),
8252 build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1)));
8253 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
8254 if (integer_nonzerop (candnotd))
8255 return omit_one_operand (type, rslt, arg0);
8256 }
8257
8258 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
8259 and similarly for >= into !=. */
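/* E.g. for unsigned X, X < (1 << 4) holds exactly when no bit at
   position 4 or above is set, i.e. when X >> 4 == 0.  */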
8260 if ((code == LT_EXPR || code == GE_EXPR)
8261 && TYPE_UNSIGNED (TREE_TYPE (arg0))
8262 && TREE_CODE (arg1) == LSHIFT_EXPR
8263 && integer_onep (TREE_OPERAND (arg1, 0)))
8264 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
8265 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
8266 TREE_OPERAND (arg1, 1)),
8267 fold_convert (TREE_TYPE (arg0), integer_zero_node));
8268
8269 else if ((code == LT_EXPR || code == GE_EXPR)
8270 && TYPE_UNSIGNED (TREE_TYPE (arg0))
8271 && (TREE_CODE (arg1) == NOP_EXPR
8272 || TREE_CODE (arg1) == CONVERT_EXPR)
8273 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
8274 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
8275 return
8276 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
8277 fold_convert (TREE_TYPE (arg0),
8278 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
8279 TREE_OPERAND (TREE_OPERAND (arg1, 0),
8280 1))),
8281 fold_convert (TREE_TYPE (arg0), integer_zero_node));
8282
8283 /* Simplify comparison of something with itself. (For IEEE
8284 floating-point, we can only do some of these simplifications.) */
8285 if (operand_equal_p (arg0, arg1, 0))
8286 {
8287 switch (code)
8288 {
8289 case EQ_EXPR:
8290 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8291 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8292 return constant_boolean_node (1, type);
8293 break;
8294
8295 case GE_EXPR:
8296 case LE_EXPR:
8297 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8298 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8299 return constant_boolean_node (1, type);
8300 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8301
8302 case NE_EXPR:
8303 /* For NE, we can only do this simplification if the operands
8304 are integral or we don't honor IEEE floating point NaNs.  */
8305 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8306 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8307 break;
8308 /* ... fall through ... */
8309 case GT_EXPR:
8310 case LT_EXPR:
8311 return constant_boolean_node (0, type);
8312 default:
8313 abort ();
8314 }
8315 }
8316
8317 /* If we are comparing an expression that just has comparisons
8318 of two integer values, arithmetic expressions of those comparisons,
8319 and constants, we can simplify it. There are only three cases
8320 to check: the two values can either be equal, the first can be
8321 greater, or the second can be greater. Fold the expression for
8322 those three values. Since each value must be 0 or 1, we have
8323 eight possibilities, each of which corresponds to the constant 0
8324 or 1 or one of the six possible comparisons.
8325
8326 This handles common cases like (a > b) == 0 but also handles
8327 expressions like ((x > y) - (y > x)) > 0, which supposedly
8328 occur in macroized code. */
8329
8330 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8331 {
8332 tree cval1 = 0, cval2 = 0;
8333 int save_p = 0;
8334
8335 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8336 /* Don't handle degenerate cases here; they should already
8337 have been handled anyway. */
8338 && cval1 != 0 && cval2 != 0
8339 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8340 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8341 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8342 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8343 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8344 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8345 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8346 {
8347 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8348 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8349
8350 /* We can't just pass T to eval_subst in case cval1 or cval2
8351 was the same as ARG1. */
8352
8353 tree high_result
8354 = fold (build2 (code, type,
8355 eval_subst (arg0, cval1, maxval,
8356 cval2, minval),
8357 arg1));
8358 tree equal_result
8359 = fold (build2 (code, type,
8360 eval_subst (arg0, cval1, maxval,
8361 cval2, maxval),
8362 arg1));
8363 tree low_result
8364 = fold (build2 (code, type,
8365 eval_subst (arg0, cval1, minval,
8366 cval2, maxval),
8367 arg1));
8368
8369 /* All three of these results should be 0 or 1. Confirm they
8370 are. Then use those values to select the proper code
8371 to use. */
8372
8373 if ((integer_zerop (high_result)
8374 || integer_onep (high_result))
8375 && (integer_zerop (equal_result)
8376 || integer_onep (equal_result))
8377 && (integer_zerop (low_result)
8378 || integer_onep (low_result)))
8379 {
8380 /* Make a 3-bit mask with the high-order bit being the
8381 value for `>', the next for `=', and the low for `<'.  */
8382 switch ((integer_onep (high_result) * 4)
8383 + (integer_onep (equal_result) * 2)
8384 + integer_onep (low_result))
8385 {
8386 case 0:
8387 /* Always false. */
8388 return omit_one_operand (type, integer_zero_node, arg0);
8389 case 1:
8390 code = LT_EXPR;
8391 break;
8392 case 2:
8393 code = EQ_EXPR;
8394 break;
8395 case 3:
8396 code = LE_EXPR;
8397 break;
8398 case 4:
8399 code = GT_EXPR;
8400 break;
8401 case 5:
8402 code = NE_EXPR;
8403 break;
8404 case 6:
8405 code = GE_EXPR;
8406 break;
8407 case 7:
8408 /* Always true. */
8409 return omit_one_operand (type, integer_one_node, arg0);
8410 }
8411
8412 tem = build2 (code, type, cval1, cval2);
8413 if (save_p)
8414 return save_expr (tem);
8415 else
8416 return fold (tem);
8417 }
8418 }
8419 }
8420
8421 /* If this is a comparison of a field, we may be able to simplify it. */
8422 if (((TREE_CODE (arg0) == COMPONENT_REF
8423 && lang_hooks.can_use_bit_fields_p ())
8424 || TREE_CODE (arg0) == BIT_FIELD_REF)
8425 && (code == EQ_EXPR || code == NE_EXPR)
8426 /* Handle the constant case even without -O
8427 to make sure the warnings are given. */
8428 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
8429 {
8430 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
8431 if (t1)
8432 return t1;
8433 }
8434
8435 /* If this is a comparison of complex values and either or both sides
8436 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
8437 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
8438 This may prevent needless evaluations. */
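/* E.g. for EQ_EXPR this produces, in effect,
REALPART (a) == REALPART (b) && IMAGPART (a) == IMAGPART (b),
and for NE_EXPR the analogous || form. */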
8439 if ((code == EQ_EXPR || code == NE_EXPR)
8440 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
8441 && (TREE_CODE (arg0) == COMPLEX_EXPR
8442 || TREE_CODE (arg1) == COMPLEX_EXPR
8443 || TREE_CODE (arg0) == COMPLEX_CST
8444 || TREE_CODE (arg1) == COMPLEX_CST))
8445 {
8446 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
8447 tree real0, imag0, real1, imag1;
8448
8449 arg0 = save_expr (arg0);
8450 arg1 = save_expr (arg1);
8451 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
8452 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
8453 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
8454 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
8455
8456 return fold (build2 ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
8457 : TRUTH_ORIF_EXPR),
8458 type,
8459 fold (build2 (code, type, real0, real1)),
8460 fold (build2 (code, type, imag0, imag1))));
8461 }
8462
8463 /* Optimize comparisons of strlen vs zero to a compare of the
8464 first character of the string vs zero. To wit,
8465 strlen(ptr) == 0 => *ptr == 0
8466 strlen(ptr) != 0 => *ptr != 0
8467 Other cases should reduce to one of these two (or a constant)
8468 due to the return value of strlen being unsigned. */
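/* For instance, strlen (ptr) > 0 should already have been
canonicalized to strlen (ptr) != 0, since an unsigned value can
never be negative. */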
8469 if ((code == EQ_EXPR || code == NE_EXPR)
8470 && integer_zerop (arg1)
8471 && TREE_CODE (arg0) == CALL_EXPR)
8472 {
8473 tree fndecl = get_callee_fndecl (arg0);
8474 tree arglist;
8475
8476 if (fndecl
8477 && DECL_BUILT_IN (fndecl)
8478 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
8479 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
8480 && (arglist = TREE_OPERAND (arg0, 1))
8481 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
8482 && ! TREE_CHAIN (arglist))
8483 return fold (build2 (code, type,
8484 build1 (INDIRECT_REF, char_type_node,
8485 TREE_VALUE (arglist)),
8486 integer_zero_node));
8487 }
8488
8489 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8490 into a single range test. */
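/* For example, with truncating division x/3 == 2 holds exactly for
x in [6, 8], so fold_div_compare can replace the division with a
single range test on x (roughly (unsigned) (x - 6) <= 2). */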
8491 if (TREE_CODE (arg0) == TRUNC_DIV_EXPR
8492 && TREE_CODE (arg1) == INTEGER_CST
8493 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8494 && !integer_zerop (TREE_OPERAND (arg0, 1))
8495 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8496 && !TREE_OVERFLOW (arg1))
8497 {
8498 t1 = fold_div_compare (code, type, arg0, arg1);
8499 if (t1 != NULL_TREE)
8500 return t1;
8501 }
8502
8503 /* Both ARG0 and ARG1 are known to be constants at this point. */
8504 t1 = fold_relational_const (code, type, arg0, arg1);
8505 return (t1 == NULL_TREE ? t : t1);
8506
8507 case UNORDERED_EXPR:
8508 case ORDERED_EXPR:
8509 case UNLT_EXPR:
8510 case UNLE_EXPR:
8511 case UNGT_EXPR:
8512 case UNGE_EXPR:
8513 case UNEQ_EXPR:
8514 case LTGT_EXPR:
8515 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8516 {
8517 t1 = fold_relational_const (code, type, arg0, arg1);
8518 if (t1 != NULL_TREE)
8519 return t1;
8520 }
8521
8522 /* If the first operand is NaN, the result is constant. */
8523 if (TREE_CODE (arg0) == REAL_CST
8524 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
8525 && (code != LTGT_EXPR || ! flag_trapping_math))
8526 {
8527 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
8528 ? integer_zero_node
8529 : integer_one_node;
8530 return omit_one_operand (type, t1, arg1);
8531 }
8532
8533 /* If the second operand is NaN, the result is constant. */
8534 if (TREE_CODE (arg1) == REAL_CST
8535 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
8536 && (code != LTGT_EXPR || ! flag_trapping_math))
8537 {
8538 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
8539 ? integer_zero_node
8540 : integer_one_node;
8541 return omit_one_operand (type, t1, arg0);
8542 }
8543
8544 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8545 {
8546 tree targ0 = strip_float_extensions (arg0);
8547 tree targ1 = strip_float_extensions (arg1);
8548 tree newtype = TREE_TYPE (targ0);
8549
8550 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8551 newtype = TREE_TYPE (targ1);
8552
8553 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8554 return fold (build2 (code, type, fold_convert (newtype, targ0),
8555 fold_convert (newtype, targ1)));
8556 }
8557
8558 return t;
8559
8560 case COND_EXPR:
8561 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
8562 so all simple results must be passed through pedantic_non_lvalue. */
8563 if (TREE_CODE (arg0) == INTEGER_CST)
8564 {
8565 tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
8566 /* Only optimize constant conditions when the selected branch
8567 has the same type as the COND_EXPR. This avoids optimizing
8568 away "c ? x : throw", where the throw has a void type. */
8569 if (! VOID_TYPE_P (TREE_TYPE (tem))
8570 || VOID_TYPE_P (type))
8571 return pedantic_non_lvalue (tem);
8572 return t;
8573 }
8574 if (operand_equal_p (arg1, TREE_OPERAND (t, 2), 0))
8575 return pedantic_omit_one_operand (type, arg1, arg0);
8576
8577 /* If we have A op B ? A : C, we may be able to convert this to a
8578 simpler expression, depending on the operation and the values
8579 of B and C. Signed zeros prevent all of these transformations,
8580 for reasons given above each one.
8581
8582 Also try swapping the arguments and inverting the conditional. */
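/* Typical instances: a > b ? a : b can become MAX_EXPR (a, b), and
a < 0 ? -a : a can become ABS_EXPR (a), when signed zeros need not
be honored. */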
8583 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
8584 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
8585 arg1, TREE_OPERAND (arg0, 1))
8586 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
8587 {
8588 tem = fold_cond_expr_with_comparison (type, arg0,
8589 TREE_OPERAND (t, 2));
8590 if (tem)
8591 return tem;
8592 }
8593
8594 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
8595 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
8596 TREE_OPERAND (t, 2),
8597 TREE_OPERAND (arg0, 1))
8598 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 2)))))
8599 {
8600 tem = invert_truthvalue (arg0);
8601 if (TREE_CODE_CLASS (TREE_CODE (tem)) == '<')
8602 {
8603 tem = fold_cond_expr_with_comparison (type, tem, arg1);
8604 if (tem)
8605 return tem;
8606 }
8607 }
8608
8609 /* If the second operand is simpler than the third, swap them
8610 since that produces better jump optimization results. */
8611 if (tree_swap_operands_p (TREE_OPERAND (t, 1),
8612 TREE_OPERAND (t, 2), false))
8613 {
8614 /* See if this can be inverted. If it can't, possibly because
8615 it was a floating-point inequality comparison, don't do
8616 anything. */
8617 tem = invert_truthvalue (arg0);
8618
8619 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8620 return fold (build3 (code, type, tem,
8621 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
8622 }
8623
8624 /* Convert A ? 1 : 0 to simply A. */
8625 if (integer_onep (TREE_OPERAND (t, 1))
8626 && integer_zerop (TREE_OPERAND (t, 2))
8627 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
8628 call to fold will try to move the conversion inside
8629 a COND, which will recurse. In that case, the COND_EXPR
8630 is probably the best choice, so leave it alone. */
8631 && type == TREE_TYPE (arg0))
8632 return pedantic_non_lvalue (arg0);
8633
8634 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
8635 over COND_EXPR in cases such as floating point comparisons. */
8636 if (integer_zerop (TREE_OPERAND (t, 1))
8637 && integer_onep (TREE_OPERAND (t, 2))
8638 && truth_value_p (TREE_CODE (arg0)))
8639 return pedantic_non_lvalue (fold_convert (type,
8640 invert_truthvalue (arg0)));
8641
8642 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
8643 if (TREE_CODE (arg0) == LT_EXPR
8644 && integer_zerop (TREE_OPERAND (arg0, 1))
8645 && integer_zerop (TREE_OPERAND (t, 2))
8646 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
8647 return fold_convert (type, fold (build2 (BIT_AND_EXPR,
8648 TREE_TYPE (tem), tem, arg1)));
8649
8650 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
8651 already handled above. */
8652 if (TREE_CODE (arg0) == BIT_AND_EXPR
8653 && integer_onep (TREE_OPERAND (arg0, 1))
8654 && integer_zerop (TREE_OPERAND (t, 2))
8655 && integer_pow2p (arg1))
8656 {
8657 tree tem = TREE_OPERAND (arg0, 0);
8658 STRIP_NOPS (tem);
8659 if (TREE_CODE (tem) == RSHIFT_EXPR
8660 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
8661 == TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
8662 return fold (build2 (BIT_AND_EXPR, type,
8663 TREE_OPERAND (tem, 0), arg1));
8664 }
8665
8666 /* A & N ? N : 0 is simply A & N if N is a power of two. This
8667 is probably obsolete because the first operand should be a
8668 truth value (that's why we have the two cases above), but let's
8669 leave it in until we can confirm this for all front-ends. */
8670 if (integer_zerop (TREE_OPERAND (t, 2))
8671 && TREE_CODE (arg0) == NE_EXPR
8672 && integer_zerop (TREE_OPERAND (arg0, 1))
8673 && integer_pow2p (arg1)
8674 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
8675 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
8676 arg1, OEP_ONLY_CONST))
8677 return pedantic_non_lvalue (fold_convert (type,
8678 TREE_OPERAND (arg0, 0)));
8679
8680 /* Convert A ? B : 0 into A && B if A and B are truth values. */
8681 if (integer_zerop (TREE_OPERAND (t, 2))
8682 && truth_value_p (TREE_CODE (arg0))
8683 && truth_value_p (TREE_CODE (arg1)))
8684 return fold (build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1));
8685
8686 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
8687 if (integer_onep (TREE_OPERAND (t, 2))
8688 && truth_value_p (TREE_CODE (arg0))
8689 && truth_value_p (TREE_CODE (arg1)))
8690 {
8691 /* Only perform transformation if ARG0 is easily inverted. */
8692 tem = invert_truthvalue (arg0);
8693 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8694 return fold (build2 (TRUTH_ORIF_EXPR, type, tem, arg1));
8695 }
8696
8697 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
8698 if (integer_zerop (arg1)
8699 && truth_value_p (TREE_CODE (arg0))
8700 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
8701 {
8702 /* Only perform transformation if ARG0 is easily inverted. */
8703 tem = invert_truthvalue (arg0);
8704 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8705 return fold (build2 (TRUTH_ANDIF_EXPR, type, tem,
8706 TREE_OPERAND (t, 2)));
8707 }
8708
8709 /* Convert A ? 1 : B into A || B if A and B are truth values. */
8710 if (integer_onep (arg1)
8711 && truth_value_p (TREE_CODE (arg0))
8712 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
8713 return fold (build2 (TRUTH_ORIF_EXPR, type, arg0,
8714 TREE_OPERAND (t, 2)));
8715
8716 return t;
8717
8718 case COMPOUND_EXPR:
8719 /* When pedantic, a compound expression can be neither an lvalue
8720 nor an integer constant expression. */
8721 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
8722 return t;
8723 /* Don't let (0, 0) be a null pointer constant. */
8724 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
8725 : fold_convert (type, arg1);
8726 return pedantic_non_lvalue (tem);
8727
8728 case COMPLEX_EXPR:
8729 if (wins)
8730 return build_complex (type, arg0, arg1);
8731 return t;
8732
8733 case REALPART_EXPR:
8734 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8735 return t;
8736 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8737 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8738 TREE_OPERAND (arg0, 1));
8739 else if (TREE_CODE (arg0) == COMPLEX_CST)
8740 return TREE_REALPART (arg0);
8741 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8742 return fold (build2 (TREE_CODE (arg0), type,
8743 fold (build1 (REALPART_EXPR, type,
8744 TREE_OPERAND (arg0, 0))),
8745 fold (build1 (REALPART_EXPR, type,
8746 TREE_OPERAND (arg0, 1)))));
8747 return t;
8748
8749 case IMAGPART_EXPR:
8750 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8751 return fold_convert (type, integer_zero_node);
8752 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8753 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8754 TREE_OPERAND (arg0, 0));
8755 else if (TREE_CODE (arg0) == COMPLEX_CST)
8756 return TREE_IMAGPART (arg0);
8757 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8758 return fold (build2 (TREE_CODE (arg0), type,
8759 fold (build1 (IMAGPART_EXPR, type,
8760 TREE_OPERAND (arg0, 0))),
8761 fold (build1 (IMAGPART_EXPR, type,
8762 TREE_OPERAND (arg0, 1)))));
8763 return t;
8764
8765 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
8766 appropriate. */
8767 case CLEANUP_POINT_EXPR:
8768 if (! has_cleanups (arg0))
8769 return TREE_OPERAND (t, 0);
8770
8771 {
8772 enum tree_code code0 = TREE_CODE (arg0);
8773 int kind0 = TREE_CODE_CLASS (code0);
8774 tree arg00 = TREE_OPERAND (arg0, 0);
8775 tree arg01;
8776
8777 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
8778 return fold (build1 (code0, type,
8779 fold (build1 (CLEANUP_POINT_EXPR,
8780 TREE_TYPE (arg00), arg00))));
8781
8782 if (kind0 == '<' || kind0 == '2'
8783 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
8784 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
8785 || code0 == TRUTH_XOR_EXPR)
8786 {
8787 arg01 = TREE_OPERAND (arg0, 1);
8788
8789 if (TREE_CONSTANT (arg00)
8790 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
8791 && ! has_cleanups (arg00)))
8792 return fold (build2 (code0, type, arg00,
8793 fold (build1 (CLEANUP_POINT_EXPR,
8794 TREE_TYPE (arg01), arg01))));
8795
8796 if (TREE_CONSTANT (arg01))
8797 return fold (build2 (code0, type,
8798 fold (build1 (CLEANUP_POINT_EXPR,
8799 TREE_TYPE (arg00), arg00)),
8800 arg01));
8801 }
8802
8803 return t;
8804 }
8805
8806 case CALL_EXPR:
8807 /* Check for a built-in function. */
8808 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
8809 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
8810 == FUNCTION_DECL)
8811 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
8812 {
8813 tree tmp = fold_builtin (t);
8814 if (tmp)
8815 return tmp;
8816 }
8817 return t;
8818
8819 default:
8820 return t;
8821 } /* switch (code) */
8822 }
8823
8824 #ifdef ENABLE_FOLD_CHECKING
8825 #undef fold
8826
8827 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
8828 static void fold_check_failed (tree, tree);
8829 void print_fold_checksum (tree);
8830
8831 /* When --enable-checking=fold, compute a digest of expr before
8832 and after the actual fold call to verify that fold did not
8833 accidentally change the original expr. */
8834
8835 tree
8836 fold (tree expr)
8837 {
8838 tree ret;
8839 struct md5_ctx ctx;
8840 unsigned char checksum_before[16], checksum_after[16];
8841 htab_t ht;
8842
8843 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8844 md5_init_ctx (&ctx);
8845 fold_checksum_tree (expr, &ctx, ht);
8846 md5_finish_ctx (&ctx, checksum_before);
8847 htab_empty (ht);
8848
8849 ret = fold_1 (expr);
8850
8851 md5_init_ctx (&ctx);
8852 fold_checksum_tree (expr, &ctx, ht);
8853 md5_finish_ctx (&ctx, checksum_after);
8854 htab_delete (ht);
8855
8856 if (memcmp (checksum_before, checksum_after, 16))
8857 fold_check_failed (expr, ret);
8858
8859 return ret;
8860 }
8861
8862 void
8863 print_fold_checksum (tree expr)
8864 {
8865 struct md5_ctx ctx;
8866 unsigned char checksum[16], cnt;
8867 htab_t ht;
8868
8869 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8870 md5_init_ctx (&ctx);
8871 fold_checksum_tree (expr, &ctx, ht);
8872 md5_finish_ctx (&ctx, checksum);
8873 htab_delete (ht);
8874 for (cnt = 0; cnt < 16; ++cnt)
8875 fprintf (stderr, "%02x", checksum[cnt]);
8876 putc ('\n', stderr);
8877 }
8878
8879 static void
8880 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
8881 {
8882 internal_error ("fold check: original tree changed by fold");
8883 }
8884
8885 static void
8886 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
8887 {
8888 void **slot;
8889 enum tree_code code;
8890 char buf[sizeof (struct tree_decl)];
8891 int i, len;
8892
8893 if (sizeof (struct tree_exp) + 5 * sizeof (tree)
8894 > sizeof (struct tree_decl)
8895 || sizeof (struct tree_type) > sizeof (struct tree_decl))
8896 abort ();
8897 if (expr == NULL)
8898 return;
8899 slot = htab_find_slot (ht, expr, INSERT);
8900 if (*slot != NULL)
8901 return;
8902 *slot = expr;
8903 code = TREE_CODE (expr);
8904 if (code == SAVE_EXPR && SAVE_EXPR_NOPLACEHOLDER (expr))
8905 {
8906 /* Allow SAVE_EXPR_NOPLACEHOLDER flag to be modified. */
8907 memcpy (buf, expr, tree_size (expr));
8908 expr = (tree) buf;
8909 SAVE_EXPR_NOPLACEHOLDER (expr) = 0;
8910 }
8911 else if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
8912 {
8913 /* Allow DECL_ASSEMBLER_NAME to be modified. */
8914 memcpy (buf, expr, tree_size (expr));
8915 expr = (tree) buf;
8916 SET_DECL_ASSEMBLER_NAME (expr, NULL);
8917 }
8918 else if (TREE_CODE_CLASS (code) == 't'
8919 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
8920 {
8921 /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified. */
8922 memcpy (buf, expr, tree_size (expr));
8923 expr = (tree) buf;
8924 TYPE_POINTER_TO (expr) = NULL;
8925 TYPE_REFERENCE_TO (expr) = NULL;
8926 }
8927 md5_process_bytes (expr, tree_size (expr), ctx);
8928 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
8929 if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
8930 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
8931 len = TREE_CODE_LENGTH (code);
8932 switch (TREE_CODE_CLASS (code))
8933 {
8934 case 'c':
8935 switch (code)
8936 {
8937 case STRING_CST:
8938 md5_process_bytes (TREE_STRING_POINTER (expr),
8939 TREE_STRING_LENGTH (expr), ctx);
8940 break;
8941 case COMPLEX_CST:
8942 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
8943 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
8944 break;
8945 case VECTOR_CST:
8946 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
8947 break;
8948 default:
8949 break;
8950 }
8951 break;
8952 case 'x':
8953 switch (code)
8954 {
8955 case TREE_LIST:
8956 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
8957 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
8958 break;
8959 case TREE_VEC:
8960 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
8961 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
8962 break;
8963 default:
8964 break;
8965 }
8966 break;
8967 case 'e':
8968 switch (code)
8969 {
8970 case SAVE_EXPR: len = 2; break;
8971 case GOTO_SUBROUTINE_EXPR: len = 0; break;
8972 case RTL_EXPR: len = 0; break;
8973 case WITH_CLEANUP_EXPR: len = 2; break;
8974 default: break;
8975 }
8976 /* Fall through. */
8977 case 'r':
8978 case '<':
8979 case '1':
8980 case '2':
8981 case 's':
8982 for (i = 0; i < len; ++i)
8983 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
8984 break;
8985 case 'd':
8986 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
8987 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
8988 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
8989 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
8990 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
8991 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
8992 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
8993 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
8994 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
8995 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
8996 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
8997 break;
8998 case 't':
8999 if (TREE_CODE (expr) == ENUMERAL_TYPE)
9000 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
9001 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
9002 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
9003 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
9004 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
9005 if (INTEGRAL_TYPE_P (expr)
9006 || SCALAR_FLOAT_TYPE_P (expr))
9007 {
9008 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
9009 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
9010 }
9011 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
9012 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
9013 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
9014 break;
9015 default:
9016 break;
9017 }
9018 }
9019
9020 #endif
9021
9022 /* Perform constant folding and related simplification of initializer
9023 expression EXPR. This behaves identically to "fold" but ignores
9024 potential run-time traps and exceptions that fold must preserve. */
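/* For example, a constant floating-point expression in a static
initializer can be folded here even when -ftrapping-math would
otherwise make fold preserve the possibly-trapping operation. */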
9025
9026 tree
9027 fold_initializer (tree expr)
9028 {
9029 int saved_signaling_nans = flag_signaling_nans;
9030 int saved_trapping_math = flag_trapping_math;
9031 int saved_trapv = flag_trapv;
9032 tree result;
9033
9034 flag_signaling_nans = 0;
9035 flag_trapping_math = 0;
9036 flag_trapv = 0;
9037
9038 result = fold (expr);
9039
9040 flag_signaling_nans = saved_signaling_nans;
9041 flag_trapping_math = saved_trapping_math;
9042 flag_trapv = saved_trapv;
9043
9044 return result;
9045 }
9046
9047 /* Determine if first argument is a multiple of second argument. Return 0 if
9048 it is not, or we cannot easily determine it to be.
9049
9050 An example of the sort of thing we care about (at this point; this routine
9051 could surely be made more general, and expanded to do what the *_DIV_EXPR's
9052 fold cases do now) is discovering that
9053
9054 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
9055
9056 is a multiple of
9057
9058 SAVE_EXPR (J * 8)
9059
9060 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
9061
9062 This code also handles discovering that
9063
9064 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
9065
9066 is a multiple of 8 so we don't have to worry about dealing with a
9067 possible remainder.
9068
9069 Note that we *look* inside a SAVE_EXPR only to determine how it was
9070 calculated; it is not safe for fold to do much of anything else with the
9071 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
9072 at run time. For example, the latter example above *cannot* be implemented
9073 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
9074 evaluation time of the original SAVE_EXPR is not necessarily the same at
9075 the time the new expression is evaluated. The only optimization of this
9076 sort that would be valid is changing
9077
9078 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
9079
9080 divided by 8 to
9081
9082 SAVE_EXPR (I) * SAVE_EXPR (J)
9083
9084 (where the same SAVE_EXPR (J) is used in the original and the
9085 transformed version). */
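/* To make the recursion concrete: for TOP = SAVE_EXPR (I) * SAVE_EXPR (J * 8)
and BOTTOM = 8, the MULT_EXPR case succeeds because its second operand
does: the SAVE_EXPR case looks inside to J * 8, whose constant operand
8 is trivially a multiple of BOTTOM. */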
9086
9087 static int
9088 multiple_of_p (tree type, tree top, tree bottom)
9089 {
9090 if (operand_equal_p (top, bottom, 0))
9091 return 1;
9092
9093 if (TREE_CODE (type) != INTEGER_TYPE)
9094 return 0;
9095
9096 switch (TREE_CODE (top))
9097 {
9098 case MULT_EXPR:
9099 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
9100 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
9101
9102 case PLUS_EXPR:
9103 case MINUS_EXPR:
9104 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
9105 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
9106
9107 case LSHIFT_EXPR:
9108 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
9109 {
9110 tree op1, t1;
9111
9112 op1 = TREE_OPERAND (top, 1);
9113 /* const_binop may not detect overflow correctly,
9114 so check for it explicitly here. */
9115 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
9116 > TREE_INT_CST_LOW (op1)
9117 && TREE_INT_CST_HIGH (op1) == 0
9118 && 0 != (t1 = fold_convert (type,
9119 const_binop (LSHIFT_EXPR,
9120 size_one_node,
9121 op1, 0)))
9122 && ! TREE_OVERFLOW (t1))
9123 return multiple_of_p (type, t1, bottom);
9124 }
9125 return 0;
9126
9127 case NOP_EXPR:
9128 /* Can't handle conversions from non-integral or wider integral type. */
9129 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
9130 || (TYPE_PRECISION (type)
9131 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
9132 return 0;
9133
9134 /* ... fall through ... */
9135
9136 case SAVE_EXPR:
9137 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
9138
9139 case INTEGER_CST:
9140 if (TREE_CODE (bottom) != INTEGER_CST
9141 || (TYPE_UNSIGNED (type)
9142 && (tree_int_cst_sgn (top) < 0
9143 || tree_int_cst_sgn (bottom) < 0)))
9144 return 0;
9145 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
9146 top, bottom, 0));
9147
9148 default:
9149 return 0;
9150 }
9151 }
9152
9153 /* Return true if `t' is known to be non-negative. */
9154
9155 int
9156 tree_expr_nonnegative_p (tree t)
9157 {
9158 switch (TREE_CODE (t))
9159 {
9160 case ABS_EXPR:
9161 return 1;
9162
9163 case INTEGER_CST:
9164 return tree_int_cst_sgn (t) >= 0;
9165
9166 case REAL_CST:
9167 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
9168
9169 case PLUS_EXPR:
9170 if (FLOAT_TYPE_P (TREE_TYPE (t)))
9171 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9172 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9173
9174 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
9175 both unsigned and at least 2 bits shorter than the result. */
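/* E.g. two unsigned chars zero-extended to 32-bit int sum to at most
255 + 255 = 510, which needs only 9 value bits, so the sum cannot
reach the sign bit. */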
9176 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
9177 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
9178 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
9179 {
9180 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
9181 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
9182 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
9183 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
9184 {
9185 unsigned int prec = MAX (TYPE_PRECISION (inner1),
9186 TYPE_PRECISION (inner2)) + 1;
9187 return prec < TYPE_PRECISION (TREE_TYPE (t));
9188 }
9189 }
9190 break;
9191
9192 case MULT_EXPR:
9193 if (FLOAT_TYPE_P (TREE_TYPE (t)))
9194 {
9195 /* x * x for floating point x is always non-negative. */
9196 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
9197 return 1;
9198 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9199 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9200 }
9201
9202 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
9203 both unsigned and the sum of their precisions is less than the
precision of the result. */
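/* E.g. two unsigned chars zero-extended to 32-bit int multiply to at
most 255 * 255 = 65025, which fits in 8 + 8 = 16 bits, so the
product cannot reach the sign bit. */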
9204 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
9205 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
9206 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
9207 {
9208 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
9209 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
9210 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
9211 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
9212 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
9213 < TYPE_PRECISION (TREE_TYPE (t));
9214 }
9215 return 0;
9216
9217 case TRUNC_DIV_EXPR:
9218 case CEIL_DIV_EXPR:
9219 case FLOOR_DIV_EXPR:
9220 case ROUND_DIV_EXPR:
9221 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9222 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9223
9224 case TRUNC_MOD_EXPR:
9225 case CEIL_MOD_EXPR:
9226 case FLOOR_MOD_EXPR:
9227 case ROUND_MOD_EXPR:
9228 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9229
9230 case RDIV_EXPR:
9231 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9232 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9233
9234 case BIT_AND_EXPR:
9235 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
9236 || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9237 case BIT_IOR_EXPR:
9238 case BIT_XOR_EXPR:
9239 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9240 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9241
9242 case NOP_EXPR:
9243 {
9244 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
9245 tree outer_type = TREE_TYPE (t);
9246
9247 if (TREE_CODE (outer_type) == REAL_TYPE)
9248 {
9249 if (TREE_CODE (inner_type) == REAL_TYPE)
9250 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9251 if (TREE_CODE (inner_type) == INTEGER_TYPE)
9252 {
9253 if (TYPE_UNSIGNED (inner_type))
9254 return 1;
9255 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9256 }
9257 }
9258 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
9259 {
9260 if (TREE_CODE (inner_type) == REAL_TYPE)
9261 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
9262 if (TREE_CODE (inner_type) == INTEGER_TYPE)
9263 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
9264 && TYPE_UNSIGNED (inner_type);
9265 }
9266 }
9267 break;
9268
9269 case COND_EXPR:
9270 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
9271 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
9272 case COMPOUND_EXPR:
9273 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9274 case MIN_EXPR:
9275 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9276 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9277 case MAX_EXPR:
9278 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9279 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9280 case MODIFY_EXPR:
9281 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9282 case BIND_EXPR:
9283 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
9284 case SAVE_EXPR:
9285 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9286 case NON_LVALUE_EXPR:
9287 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9288 case FLOAT_EXPR:
9289 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9290 case RTL_EXPR:
9291 return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));
9292
9293 case TARGET_EXPR:
9294 {
9295 tree temp = TARGET_EXPR_SLOT (t);
9296 t = TARGET_EXPR_INITIAL (t);
9297
9298 /* If the initializer is non-void, then it's a normal expression
9299 that will be assigned to the slot. */
9300 if (!VOID_TYPE_P (t))
9301 return tree_expr_nonnegative_p (t);
9302
9303 /* Otherwise, the initializer sets the slot in some way. One common
9304 way is an assignment statement at the end of the initializer. */
9305 while (1)
9306 {
9307 if (TREE_CODE (t) == BIND_EXPR)
9308 t = expr_last (BIND_EXPR_BODY (t));
9309 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
9310 || TREE_CODE (t) == TRY_CATCH_EXPR)
9311 t = expr_last (TREE_OPERAND (t, 0));
9312 else if (TREE_CODE (t) == STATEMENT_LIST)
9313 t = expr_last (t);
9314 else
9315 break;
9316 }
9317 if (TREE_CODE (t) == MODIFY_EXPR
9318 && TREE_OPERAND (t, 0) == temp)
9319 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9320
9321 return 0;
9322 }
9323
9324 case CALL_EXPR:
9325 {
9326 tree fndecl = get_callee_fndecl (t);
9327 tree arglist = TREE_OPERAND (t, 1);
9328 if (fndecl
9329 && DECL_BUILT_IN (fndecl)
9330 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
9331 switch (DECL_FUNCTION_CODE (fndecl))
9332 {
9333 #define CASE_BUILTIN_F(BUILT_IN_FN) \
9334 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
9335 #define CASE_BUILTIN_I(BUILT_IN_FN) \
9336 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
9337
9338 CASE_BUILTIN_F (BUILT_IN_ACOS)
9339 CASE_BUILTIN_F (BUILT_IN_ACOSH)
9340 CASE_BUILTIN_F (BUILT_IN_CABS)
9341 CASE_BUILTIN_F (BUILT_IN_COSH)
9342 CASE_BUILTIN_F (BUILT_IN_ERFC)
9343 CASE_BUILTIN_F (BUILT_IN_EXP)
9344 CASE_BUILTIN_F (BUILT_IN_EXP10)
9345 CASE_BUILTIN_F (BUILT_IN_EXP2)
9346 CASE_BUILTIN_F (BUILT_IN_FABS)
9347 CASE_BUILTIN_F (BUILT_IN_FDIM)
9348 CASE_BUILTIN_F (BUILT_IN_FREXP)
9349 CASE_BUILTIN_F (BUILT_IN_HYPOT)
9350 CASE_BUILTIN_F (BUILT_IN_POW10)
9351 CASE_BUILTIN_I (BUILT_IN_FFS)
9352 CASE_BUILTIN_I (BUILT_IN_PARITY)
9353 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
9354 /* Always true. */
9355 return 1;
9356
9357 CASE_BUILTIN_F (BUILT_IN_SQRT)
9358 /* sqrt(-0.0) is -0.0. */
9359 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
9360 return 1;
9361 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
9362
9363 CASE_BUILTIN_F (BUILT_IN_ASINH)
9364 CASE_BUILTIN_F (BUILT_IN_ATAN)
9365 CASE_BUILTIN_F (BUILT_IN_ATANH)
9366 CASE_BUILTIN_F (BUILT_IN_CBRT)
9367 CASE_BUILTIN_F (BUILT_IN_CEIL)
9368 CASE_BUILTIN_F (BUILT_IN_ERF)
9369 CASE_BUILTIN_F (BUILT_IN_EXPM1)
9370 CASE_BUILTIN_F (BUILT_IN_FLOOR)
9371 CASE_BUILTIN_F (BUILT_IN_FMOD)
9372 CASE_BUILTIN_F (BUILT_IN_LDEXP)
9373 CASE_BUILTIN_F (BUILT_IN_LLRINT)
9374 CASE_BUILTIN_F (BUILT_IN_LLROUND)
9375 CASE_BUILTIN_F (BUILT_IN_LRINT)
9376 CASE_BUILTIN_F (BUILT_IN_LROUND)
9377 CASE_BUILTIN_F (BUILT_IN_MODF)
9378 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
9379 CASE_BUILTIN_F (BUILT_IN_POW)
9380 CASE_BUILTIN_F (BUILT_IN_RINT)
9381 CASE_BUILTIN_F (BUILT_IN_ROUND)
9382 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
9383 CASE_BUILTIN_F (BUILT_IN_SINH)
9384 CASE_BUILTIN_F (BUILT_IN_TANH)
9385 CASE_BUILTIN_F (BUILT_IN_TRUNC)
9386 /* True if the 1st argument is nonnegative. */
9387 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
9388
9389 CASE_BUILTIN_F (BUILT_IN_FMAX)
9390 /* True if the 1st OR 2nd arguments are nonnegative. */
9391 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9392 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9393
9394 CASE_BUILTIN_F (BUILT_IN_FMIN)
9395 /* True if the 1st AND 2nd arguments are nonnegative. */
9396 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9397 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9398
9399 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
9400 /* True if the 2nd argument is nonnegative. */
9401 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9402
9403 default:
9404 break;
9405 #undef CASE_BUILTIN_F
9406 #undef CASE_BUILTIN_I
9407 }
9408 }
9409
9410 /* ... fall through ... */
9411
9412 default:
9413 if (truth_value_p (TREE_CODE (t)))
9414 /* Truth values evaluate to 0 or 1, which is nonnegative. */
9415 return 1;
9416 }
9417
9418 /* We don't know the sign of `t', so be conservative and return false. */
9419 return 0;
9420 }
9421
9422 /* Return true when T is an address and is known to be nonzero.
9423 For floating point we further ensure that T is not denormal.
9424 Similar logic is present in nonzero_address_p in rtlanal.c. */
9425
9426 static bool
9427 tree_expr_nonzero_p (tree t)
9428 {
9429 tree type = TREE_TYPE (t);
9430
9431 /* Doing something useful for floating point would need more work. */
9432 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9433 return false;
9434
9435 switch (TREE_CODE (t))
9436 {
9437 case ABS_EXPR:
9438 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9439 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
/* Don't fall through to the INTEGER_CST case: an ABS_EXPR is not
an INTEGER_CST, so the !integer_zerop test below would wrongly
report every ABS_EXPR as nonzero. */
break;
9440 
9441 case INTEGER_CST:
9442 return !integer_zerop (t);
9443
9444 case PLUS_EXPR:
9445 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9446 {
9447 /* In the presence of negative values it is hard
9448 to say anything definite. */
9449 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9450 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
9451 return false;
9452 /* One of the operands must be positive and the other non-negative. */
9453 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9454 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9455 }
9456 break;
9457
9458 case MULT_EXPR:
9459 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9460 {
9461 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9462 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9463 }
9464 break;
9465
9466 case NOP_EXPR:
9467 {
9468 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
9469 tree outer_type = TREE_TYPE (t);
9470
9471 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
9472 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
9473 }
9474 break;
9475
9476 case ADDR_EXPR:
9477 /* Weak declarations may link to NULL. */
9478 if (DECL_P (TREE_OPERAND (t, 0)))
9479 return !DECL_WEAK (TREE_OPERAND (t, 0));
9480 /* Constants and all other cases are never weak. */
9481 return true;
9482
9483 case COND_EXPR:
9484 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9485 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
9486
9487 case MIN_EXPR:
9488 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9489 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9490
9491 case MAX_EXPR:
9492 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
9493 {
9494 /* When both operands are nonzero, then MAX must be too. */
9495 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
9496 return true;
9497
9498 /* MAX where operand 0 is positive is positive. */
9499 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9500 }
9501 /* MAX where operand 1 is positive is positive. */
9502 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9503 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
9504 return true;
9505 break;
9506
9507 case COMPOUND_EXPR:
9508 case MODIFY_EXPR:
9509 case BIND_EXPR:
9510 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
9511
9512 case SAVE_EXPR:
9513 case NON_LVALUE_EXPR:
9514 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9515
9516 case BIT_IOR_EXPR:
9517 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9518 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9519
9520 default:
9521 break;
9522 }
9523 return false;
9524 }
9525
9526 /* Return true if `r' is known to be non-negative.
9527 Only handles constants at the moment. */
9528
9529 int
9530 rtl_expr_nonnegative_p (rtx r)
9531 {
9532 switch (GET_CODE (r))
9533 {
9534 case CONST_INT:
9535 return INTVAL (r) >= 0;
9536
9537 case CONST_DOUBLE:
9538 if (GET_MODE (r) == VOIDmode)
9539 return CONST_DOUBLE_HIGH (r) >= 0;
9540 return 0;
9541
9542 case CONST_VECTOR:
9543 {
9544 int units, i;
9545 rtx elt;
9546
9547 units = CONST_VECTOR_NUNITS (r);
9548
9549 for (i = 0; i < units; ++i)
9550 {
9551 elt = CONST_VECTOR_ELT (r, i);
9552 if (!rtl_expr_nonnegative_p (elt))
9553 return 0;
9554 }
9555
9556 return 1;
9557 }
9558
9559 case SYMBOL_REF:
9560 case LABEL_REF:
9561 /* These are always nonnegative. */
9562 return 1;
9563
9564 default:
9565 return 0;
9566 }
9567 }
9568
9569
9570 /* See if we are applying CODE, a relational operator, to the highest
9571 or lowest possible integer of TYPE. If so, then the result is a
9572 compile-time constant. */
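/* For instance, if OP1 has unsigned char type: X > 255 folds to 0,
X <= 255 folds to 1, X >= 255 becomes X == 255, and X < 255
becomes X != 255. */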
9573
9574 static tree
9575 fold_relational_hi_lo (enum tree_code *code_p, const tree type, tree *op0_p,
9576 tree *op1_p)
9577 {
9578 tree op0 = *op0_p;
9579 tree op1 = *op1_p;
9580 enum tree_code code = *code_p;
9581 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (op1)));
9582
9583 if (TREE_CODE (op1) == INTEGER_CST
9584 && ! TREE_CONSTANT_OVERFLOW (op1)
9585 && width <= HOST_BITS_PER_WIDE_INT
9586 && (INTEGRAL_TYPE_P (TREE_TYPE (op1))
9587 || POINTER_TYPE_P (TREE_TYPE (op1))))
9588 {
9589 unsigned HOST_WIDE_INT signed_max;
9590 unsigned HOST_WIDE_INT max, min;
9591
9592 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
9593
9594 if (TYPE_UNSIGNED (TREE_TYPE (op1)))
9595 {
9596 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9597 min = 0;
9598 }
9599 else
9600 {
9601 max = signed_max;
9602 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9603 }
9604
9605 if (TREE_INT_CST_HIGH (op1) == 0
9606 && TREE_INT_CST_LOW (op1) == max)
9607 switch (code)
9608 {
9609 case GT_EXPR:
9610 return omit_one_operand (type, integer_zero_node, op0);
9611
9612 case GE_EXPR:
9613 *code_p = EQ_EXPR;
9614 break;
9615 case LE_EXPR:
9616 return omit_one_operand (type, integer_one_node, op0);
9617
9618 case LT_EXPR:
9619 *code_p = NE_EXPR;
9620 break;
9621
9622 /* The GE_EXPR and LT_EXPR cases above are not normally
9623 reached because of previous transformations. */
9624
9625 default:
9626 break;
9627 }
9628 else if (TREE_INT_CST_HIGH (op1) == 0
9629 && TREE_INT_CST_LOW (op1) == max - 1)
9630 switch (code)
9631 {
9632 case GT_EXPR:
9633 *code_p = EQ_EXPR;
9634 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
9635 break;
9636 case LE_EXPR:
9637 *code_p = NE_EXPR;
9638 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
9639 break;
9640 default:
9641 break;
9642 }
9643 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
9644 && TREE_INT_CST_LOW (op1) == min)
9645 switch (code)
9646 {
9647 case LT_EXPR:
9648 return omit_one_operand (type, integer_zero_node, op0);
9649
9650 case LE_EXPR:
9651 *code_p = EQ_EXPR;
9652 break;
9653
9654 case GE_EXPR:
9655 return omit_one_operand (type, integer_one_node, op0);
9656
9657 case GT_EXPR:
9658 *code_p = NE_EXPR;
9659 break;
9660
9661 default:
9662 break;
9663 }
9664 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
9665 && TREE_INT_CST_LOW (op1) == min + 1)
9666 switch (code)
9667 {
9668 case GE_EXPR:
9669 *code_p = NE_EXPR;
9670 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
9671 break;
9672 case LT_EXPR:
9673 *code_p = EQ_EXPR;
9674 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
9675 break;
9676 default:
9677 break;
9678 }
9679
9680 else if (TREE_INT_CST_HIGH (op1) == 0
9681 && TREE_INT_CST_LOW (op1) == signed_max
9682 && TYPE_UNSIGNED (TREE_TYPE (op1))
9683 /* signed_type does not work on pointer types. */
9684 && INTEGRAL_TYPE_P (TREE_TYPE (op1)))
9685 {
9686 /* The following case also applies to X < signed_max+1
9687 and X >= signed_max+1 because of previous transformations. */
9688 if (code == LE_EXPR || code == GT_EXPR)
9689 {
9690 tree st0, st1, exp, retval;
9691 st0 = lang_hooks.types.signed_type (TREE_TYPE (op0));
9692 st1 = lang_hooks.types.signed_type (TREE_TYPE (op1));
9693
9694 exp = build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
9695 type,
9696 fold_convert (st0, op0),
9697 fold_convert (st1, integer_zero_node));
9698
9699 retval
9700 = nondestructive_fold_binary_to_constant (TREE_CODE (exp),
9701 TREE_TYPE (exp),
9702 TREE_OPERAND (exp, 0),
9703 TREE_OPERAND (exp, 1));
9704
9705 /* If we are in gimple form, then returning EXP would create
9706 non-gimple expressions. Clearing it is safe and ensures
9707 we do not allow a non-gimple expression to escape. */
9708 if (in_gimple_form)
9709 exp = NULL;
9710
9711 return (retval ? retval : exp);
9712 }
9713 }
9714 }
9715
9716 return NULL_TREE;
9717 }
9718
9719
9720 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
9721 attempt to fold the expression to a constant without modifying TYPE,
9722 OP0 or OP1.
9723
9724 If the expression could be simplified to a constant, then return
9725 the constant. If the expression would not be simplified to a
9726 constant, then return NULL_TREE.
9727
9728 Note this is primarily designed to be called after gimplification
9729 of the tree structures and when at least one operand is a constant.
9730 As a result of those simplifying assumptions this routine is far
9731 simpler than the generic fold routine. */
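/* A typical use is

t = nondestructive_fold_binary_to_constant (code, type, op0, op1);

with the caller falling back to the original expression whenever
NULL_TREE is returned. */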
9732
9733 tree
9734 nondestructive_fold_binary_to_constant (enum tree_code code, tree type,
9735 tree op0, tree op1)
9736 {
9737 int wins = 1;
9738 tree subop0;
9739 tree subop1;
9740 tree tem;
9741
9742 /* If this is a commutative operation, and ARG0 is a constant, move it
9743 to ARG1 to reduce the number of tests below. */
9744 if (commutative_tree_code (code)
9745 && (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST))
9746 {
9747 tem = op0;
9748 op0 = op1;
9749 op1 = tem;
9750 }
9751
9752 /* If either operand is a complex type, extract its real component. */
9753 if (TREE_CODE (op0) == COMPLEX_CST)
9754 subop0 = TREE_REALPART (op0);
9755 else
9756 subop0 = op0;
9757
9758 if (TREE_CODE (op1) == COMPLEX_CST)
9759 subop1 = TREE_REALPART (op1);
9760 else
9761 subop1 = op1;
9762
9763 /* Note if either argument is not a real or integer constant.
9764 With a few exceptions, simplification is limited to cases
9765 where both arguments are constants. */
9766 if ((TREE_CODE (subop0) != INTEGER_CST
9767 && TREE_CODE (subop0) != REAL_CST)
9768 || (TREE_CODE (subop1) != INTEGER_CST
9769 && TREE_CODE (subop1) != REAL_CST))
9770 wins = 0;
9771
9772 switch (code)
9773 {
9774 case PLUS_EXPR:
9775 /* (plus (address) (const_int)) is a constant. */
9776 if (TREE_CODE (op0) == PLUS_EXPR
9777 && TREE_CODE (op1) == INTEGER_CST
9778 && (TREE_CODE (TREE_OPERAND (op0, 0)) == ADDR_EXPR
9779 || (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
9780 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (op0, 0), 0))
9781 == ADDR_EXPR)))
9782 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
9783 {
9784 return build2 (PLUS_EXPR, type, TREE_OPERAND (op0, 0),
9785 const_binop (PLUS_EXPR, op1,
9786 TREE_OPERAND (op0, 1), 0));
9787 }
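/* Fall through. */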
9788 case BIT_XOR_EXPR:
9789
9790 binary:
9791 if (!wins)
9792 return NULL_TREE;
9793
9794 /* Both arguments are constants. Simplify. */
9795 tem = const_binop (code, op0, op1, 0);
9796 if (tem != NULL_TREE)
9797 {
9798 /* The return value should always have the same type as
9799 the original expression. */
9800 if (TREE_TYPE (tem) != type)
9801 tem = fold_convert (type, tem);
9802
9803 return tem;
9804 }
9805 return NULL_TREE;
9806
9807 case MINUS_EXPR:
9808 /* Fold &x - &x. This can happen from &x.foo - &x.
9809 This is unsafe for certain floats even in non-IEEE formats.
9810 In IEEE, it is unsafe because it gives the wrong result for NaNs.
9811 Also note that operand_equal_p is always false if an
9812 operand is volatile. */
9813 if (! FLOAT_TYPE_P (type) && operand_equal_p (op0, op1, 0))
9814 return fold_convert (type, integer_zero_node);
9815
9816 goto binary;
9817
9818 case MULT_EXPR:
9819 case BIT_AND_EXPR:
9820 /* Special case multiplication or bitwise AND where one argument
9821 is zero. */
9822 if (! FLOAT_TYPE_P (type) && integer_zerop (op1))
9823 return omit_one_operand (type, op1, op0);
9824 else
9825 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (op0)))
9826 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op0)))
9827 && real_zerop (op1))
9828 return omit_one_operand (type, op1, op0);
9829
9830 goto binary;
9831
9832 case BIT_IOR_EXPR:
9833 /* Special case when we know the result will be all ones. */
9834 if (integer_all_onesp (op1))
9835 return omit_one_operand (type, op1, op0);
9836
9837 goto binary;
9838
9839 case TRUNC_DIV_EXPR:
9840 case ROUND_DIV_EXPR:
9841 case FLOOR_DIV_EXPR:
9842 case CEIL_DIV_EXPR:
9843 case EXACT_DIV_EXPR:
9844 case TRUNC_MOD_EXPR:
9845 case ROUND_MOD_EXPR:
9846 case FLOOR_MOD_EXPR:
9847 case CEIL_MOD_EXPR:
9848 case RDIV_EXPR:
9849 /* Division by zero is undefined. */
9850 if (integer_zerop (op1))
9851 return NULL_TREE;
9852
9853 if (TREE_CODE (op1) == REAL_CST
9854 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (op1)))
9855 && real_zerop (op1))
9856 return NULL_TREE;
9857
9858 goto binary;
9859
9860 case MIN_EXPR:
9861 if (INTEGRAL_TYPE_P (type)
9862 && operand_equal_p (op1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
9863 return omit_one_operand (type, op1, op0);
9864
9865 goto binary;
9866
9867 case MAX_EXPR:
9868 if (INTEGRAL_TYPE_P (type)
9869 && TYPE_MAX_VALUE (type)
9870 && operand_equal_p (op1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
9871 return omit_one_operand (type, op1, op0);
9872
9873 goto binary;
9874
9875 case RSHIFT_EXPR:
9876 /* Optimize -1 >> x for arithmetic right shifts. */
9877 if (integer_all_onesp (op0) && ! TYPE_UNSIGNED (type))
9878 return omit_one_operand (type, op0, op1);
9879 /* ... fall through ... */
9880
9881 case LSHIFT_EXPR:
9882 if (integer_zerop (op0))
9883 return omit_one_operand (type, op0, op1);
9884
9885 /* Since a negative shift count is not well-defined, don't
9886 try to compute it in the compiler. */
9887 if (TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sgn (op1) < 0)
9888 return NULL_TREE;
9889
9890 goto binary;
9891
9892 case LROTATE_EXPR:
9893 case RROTATE_EXPR:
9894 /* -1 rotated either direction by any amount is still -1. */
9895 if (integer_all_onesp (op0))
9896 return omit_one_operand (type, op0, op1);
9897
9898 /* 0 rotated either direction by any amount is still zero. */
9899 if (integer_zerop (op0))
9900 return omit_one_operand (type, op0, op1);
9901
9902 goto binary;
9903
9904 case COMPLEX_EXPR:
9905 if (wins)
9906 return build_complex (type, op0, op1);
9907 return NULL_TREE;
9908
9909 case LT_EXPR:
9910 case LE_EXPR:
9911 case GT_EXPR:
9912 case GE_EXPR:
9913 case EQ_EXPR:
9914 case NE_EXPR:
9915 /* If one arg is a real or integer constant, put it last. */
9916 if ((TREE_CODE (op0) == INTEGER_CST
9917 && TREE_CODE (op1) != INTEGER_CST)
9918 || (TREE_CODE (op0) == REAL_CST
9919 && TREE_CODE (op1) != REAL_CST))
9920 {
9921 tree temp;
9922
9923 temp = op0;
9924 op0 = op1;
9925 op1 = temp;
9926 code = swap_tree_comparison (code);
9927 }
9928
9929 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
9930 This transformation affects the cases which are handled in later
9931 optimizations involving comparisons with non-negative constants. */
9932 if (TREE_CODE (op1) == INTEGER_CST
9933 && TREE_CODE (op0) != INTEGER_CST
9934 && tree_int_cst_sgn (op1) > 0)
9935 {
9936 switch (code)
9937 {
9938 case GE_EXPR:
9939 code = GT_EXPR;
9940 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
9941 break;
9942
9943 case LT_EXPR:
9944 code = LE_EXPR;
9945 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
9946 break;
9947
9948 default:
9949 break;
9950 }
9951 }
9952
9953 tem = fold_relational_hi_lo (&code, type, &op0, &op1);
9954 if (tem)
9955 return tem;
9956
9957 /* Fall through. */
9958
9959 case ORDERED_EXPR:
9960 case UNORDERED_EXPR:
9961 case UNLT_EXPR:
9962 case UNLE_EXPR:
9963 case UNGT_EXPR:
9964 case UNGE_EXPR:
9965 case UNEQ_EXPR:
9966 case LTGT_EXPR:
9967 if (!wins)
9968 return NULL_TREE;
9969
9970 return fold_relational_const (code, type, op0, op1);
9971
9972 case RANGE_EXPR:
9973 /* This could probably be handled. */
9974 return NULL_TREE;
9975
9976 case TRUTH_AND_EXPR:
9977 /* If second arg is constant zero, result is zero, but first arg
9978 must be evaluated. */
9979 if (integer_zerop (op1))
9980 return omit_one_operand (type, op1, op0);
9981 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
9982 case will be handled here. */
9983 if (integer_zerop (op0))
9984 return omit_one_operand (type, op0, op1);
9985 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
9986 return constant_boolean_node (true, type);
9987 return NULL_TREE;
9988
9989 case TRUTH_OR_EXPR:
9990 /* If second arg is constant true, result is true, but we must
9991 evaluate first arg. */
9992 if (TREE_CODE (op1) == INTEGER_CST && ! integer_zerop (op1))
9993 return omit_one_operand (type, op1, op0);
9994 /* Likewise for first arg, but note this only occurs here for
9995 TRUTH_OR_EXPR. */
9996 if (TREE_CODE (op0) == INTEGER_CST && ! integer_zerop (op0))
9997 return omit_one_operand (type, op0, op1);
9998 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
9999 return constant_boolean_node (false, type);
10000 return NULL_TREE;
10001
10002 case TRUTH_XOR_EXPR:
10003 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10004 {
10005 int x = ! integer_zerop (op0) ^ ! integer_zerop (op1);
10006 return constant_boolean_node (x, type);
10007 }
10008 return NULL_TREE;
10009
10010 default:
10011 return NULL_TREE;
10012 }
10013 }
10014
10015 /* Given the components of a unary expression CODE, TYPE and OP0,
10016 attempt to fold the expression to a constant without modifying
10017 TYPE or OP0.
10018
10019 If the expression could be simplified to a constant, then return
10020 the constant. If the expression would not be simplified to a
10021 constant, then return NULL_TREE.
10022
10023 Note this is primarily designed to be called after gimplification
10024 of the tree structures and when op0 is a constant. As a result
10025 of those simplifying assumptions this routine is far simpler than
10026 the generic fold routine. */
10027
10028 tree
10029 nondestructive_fold_unary_to_constant (enum tree_code code, tree type,
10030 tree op0)
10031 {
10032 /* Make sure we have a suitable constant argument. */
10033 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
10034 {
10035 tree subop;
10036
10037 if (TREE_CODE (op0) == COMPLEX_CST)
10038 subop = TREE_REALPART (op0);
10039 else
10040 subop = op0;
10041
10042 if (TREE_CODE (subop) != INTEGER_CST && TREE_CODE (subop) != REAL_CST)
10043 return NULL_TREE;
10044 }
10045
10046 switch (code)
10047 {
10048 case NOP_EXPR:
10049 case FLOAT_EXPR:
10050 case CONVERT_EXPR:
10051 case FIX_TRUNC_EXPR:
10052 case FIX_FLOOR_EXPR:
10053 case FIX_CEIL_EXPR:
10054 return fold_convert_const (code, type, op0);
10055
10056 case NEGATE_EXPR:
10057 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
10058 return fold_negate_const (op0, type);
10059 else
10060 return NULL_TREE;
10061
10062 case ABS_EXPR:
10063 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
10064 return fold_abs_const (op0, type);
10065 else
10066 return NULL_TREE;
10067
10068 case BIT_NOT_EXPR:
10069 if (TREE_CODE (op0) == INTEGER_CST)
10070 return fold_not_const (op0, type);
10071 else
10072 return NULL_TREE;
10073
10074 case REALPART_EXPR:
10075 if (TREE_CODE (op0) == COMPLEX_CST)
10076 return TREE_REALPART (op0);
10077 else
10078 return NULL_TREE;
10079
10080 case IMAGPART_EXPR:
10081 if (TREE_CODE (op0) == COMPLEX_CST)
10082 return TREE_IMAGPART (op0);
10083 else
10084 return NULL_TREE;
10085
10086 case CONJ_EXPR:
10087 if (TREE_CODE (op0) == COMPLEX_CST
10088 && TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
10089 return build_complex (type, TREE_REALPART (op0),
10090 negate_expr (TREE_IMAGPART (op0)));
10091 return NULL_TREE;
10092
10093 default:
10094 return NULL_TREE;
10095 }
10096 }
10097
10098 /* If EXP represents referencing an element in a constant string
10099 (either via pointer arithmetic or array indexing), return the
10100 tree representing the value accessed, otherwise return NULL. */
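/* E.g. both "abc"[1] (an ARRAY_REF) and *("abc" + 1) (an
INDIRECT_REF) can fold to the character constant 'b'. */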
10101
10102 tree
10103 fold_read_from_constant_string (tree exp)
10104 {
10105 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
10106 {
10107 tree exp1 = TREE_OPERAND (exp, 0);
10108 tree index;
10109 tree string;
10110
10111 if (TREE_CODE (exp) == INDIRECT_REF)
10112 string = string_constant (exp1, &index);
10113 else
10114 {
10115 tree low_bound = array_ref_low_bound (exp);
10116 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
10117
10118 /* Optimize the special-case of a zero lower bound.
10119
10120 We convert the low_bound to sizetype to avoid some problems
10121 with constant folding. (E.g. suppose the lower bound is 1,
10122 and its mode is QI. Without the conversion, (ARRAY
10123 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
10124 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
10125 if (! integer_zerop (low_bound))
10126 index = size_diffop (index, fold_convert (sizetype, low_bound));
10127
10128 string = exp1;
10129 }
10130
10131 if (string
10132 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
10133 && TREE_CODE (string) == STRING_CST
10134 && TREE_CODE (index) == INTEGER_CST
10135 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
10136 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
10137 == MODE_INT)
10138 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
10139 return fold_convert (TREE_TYPE (exp),
10140 build_int_2 ((TREE_STRING_POINTER (string)
10141 [TREE_INT_CST_LOW (index)]), 0));
10142 }
10143 return NULL;
10144 }
10145
10146 /* Return the tree for neg (ARG0) when ARG0 is known to be either
10147 an integer constant or real constant.
10148
10149 TYPE is the type of the result. */
10150
10151 static tree
10152 fold_negate_const (tree arg0, tree type)
10153 {
10154 tree t = NULL_TREE;
10155
10156 if (TREE_CODE (arg0) == INTEGER_CST)
10157 {
10158 unsigned HOST_WIDE_INT low;
10159 HOST_WIDE_INT high;
10160 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
10161 TREE_INT_CST_HIGH (arg0),
10162 &low, &high);
10163 t = build_int_2 (low, high);
10164 TREE_TYPE (t) = type;
10165 TREE_OVERFLOW (t)
10166 = (TREE_OVERFLOW (arg0)
10167 | force_fit_type (t, overflow && !TYPE_UNSIGNED (type)));
10168 TREE_CONSTANT_OVERFLOW (t)
10169 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
10170 }
10171 else if (TREE_CODE (arg0) == REAL_CST)
10172 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
10173 #ifdef ENABLE_CHECKING
10174 else
10175 abort ();
10176 #endif
10177
10178 return t;
10179 }
10180
10181 /* Return the tree for abs (ARG0) when ARG0 is known to be either
10182 an integer constant or real constant.
10183
10184 TYPE is the type of the result. */
10185
10186 tree
10187 fold_abs_const (tree arg0, tree type)
10188 {
10189 tree t = NULL_TREE;
10190
10191 if (TREE_CODE (arg0) == INTEGER_CST)
10192 {
10193 /* If the type is unsigned, then the absolute value is
10194 the same as the ordinary value. */
10195 if (TYPE_UNSIGNED (type))
10196 return arg0;
10197 /* Similarly, if the value is non-negative. */
10198 else if (INT_CST_LT (integer_minus_one_node, arg0))
10199 return arg0;
10200 /* If the value is negative, then the absolute value is
10201 its negation. */
10202 else
10203 {
10204 unsigned HOST_WIDE_INT low;
10205 HOST_WIDE_INT high;
10206 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
10207 TREE_INT_CST_HIGH (arg0),
10208 &low, &high);
10209 t = build_int_2 (low, high);
10210 TREE_TYPE (t) = type;
10211 TREE_OVERFLOW (t)
10212 = (TREE_OVERFLOW (arg0)
10213 | force_fit_type (t, overflow));
10214 TREE_CONSTANT_OVERFLOW (t)
10215 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
10216 return t;
10217 }
10218 }
10219 else if (TREE_CODE (arg0) == REAL_CST)
10220 {
10221 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
10222 return build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
10223 else
10224 return arg0;
10225 }
10226 #ifdef ENABLE_CHECKING
10227 else
10228 abort ();
10229 #endif
10230
10231 return t;
10232 }
10233
10234 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
10235 constant. TYPE is the type of the result. */
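/* For example (an illustrative sketch): ~0 => -1 in a signed type, and
   force_fit_type then truncates the result to the precision of TYPE, so
   in an 8-bit unsigned type ~0 => 255.  */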
10236
10237 static tree
10238 fold_not_const (tree arg0, tree type)
10239 {
10240 tree t = NULL_TREE;
10241
10242 if (TREE_CODE (arg0) == INTEGER_CST)
10243 {
10244 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
10245 ~ TREE_INT_CST_HIGH (arg0));
10246 TREE_TYPE (t) = type;
10247 force_fit_type (t, 0);
10248 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
10249 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
10250 }
10251 #ifdef ENABLE_CHECKING
10252 else
10253 abort ();
10254 #endif
10255
10256 return t;
10257 }
10258
10259 /* Given CODE, a relational operator, the target type TYPE, and two
10260 constant operands OP0 and OP1, return the result of the
10261 relational operation. If the result is not a compile-time
10262 constant, return NULL_TREE. */
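/* For example (an illustrative sketch): comparing the INTEGER_CSTs 0 and
   1 with LT_EXPR yields constant_boolean_node (1, type), while comparing
   two REAL_CSTs of which either is a NaN yields false for EQ_EXPR and
   true for UNORDERED_EXPR, matching IEEE semantics.  */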
10263
10264 static tree
10265 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
10266 {
10267 int result, invert;
10268
10269 /* From here on, we handle only the cases in which the result is
10270 known to be a compile-time constant. */
10271
10272 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
10273 {
10274 /* Handle the cases where either operand is a NaN. */
10275 if (REAL_VALUE_ISNAN (TREE_REAL_CST (op0))
10276 || REAL_VALUE_ISNAN (TREE_REAL_CST (op1)))
10277 {
10278 switch (code)
10279 {
10280 case EQ_EXPR:
10281 case ORDERED_EXPR:
10282 result = 0;
10283 break;
10284
10285 case NE_EXPR:
10286 case UNORDERED_EXPR:
10287 case UNLT_EXPR:
10288 case UNLE_EXPR:
10289 case UNGT_EXPR:
10290 case UNGE_EXPR:
10291 case UNEQ_EXPR:
10292 result = 1;
10293 break;
10294
10295 case LT_EXPR:
10296 case LE_EXPR:
10297 case GT_EXPR:
10298 case GE_EXPR:
10299 case LTGT_EXPR:
10300 if (flag_trapping_math)
10301 return NULL_TREE;
10302 result = 0;
10303 break;
10304
10305 default:
10306 abort ();
10307 }
10308
10309 return constant_boolean_node (result, type);
10310 }
10311
10312 /* From here on we're sure there are no NaNs. */
10313 switch (code)
10314 {
10315 case ORDERED_EXPR:
10316 return constant_boolean_node (true, type);
10317
10318 case UNORDERED_EXPR:
10319 return constant_boolean_node (false, type);
10320
10321 case UNLT_EXPR:
10322 code = LT_EXPR;
10323 break;
10324 case UNLE_EXPR:
10325 code = LE_EXPR;
10326 break;
10327 case UNGT_EXPR:
10328 code = GT_EXPR;
10329 break;
10330 case UNGE_EXPR:
10331 code = GE_EXPR;
10332 break;
10333 case UNEQ_EXPR:
10334 code = EQ_EXPR;
10335 break;
10336 case LTGT_EXPR:
10337 code = NE_EXPR;
10338 break;
10339
10340 default:
10341 break;
10342 }
10343 }
10344
10345 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
10346
10347 To compute GT, swap the arguments and do LT.
10348 To compute GE, do LT and invert the result.
10349 To compute LE, swap the arguments, do LT and invert the result.
10350 To compute NE, do EQ and invert the result.
10351
10352 Therefore, the code below must handle only EQ and LT. */
10353
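/* For example, 2 > 1 is rewritten by swapping the operands to 1 < 2,
   and 1 >= 2 is rewritten to ! (1 < 2).  */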
10354 if (code == LE_EXPR || code == GT_EXPR)
10355 {
10356 tree tem = op0;
10357 op0 = op1;
10358 op1 = tem;
10359 code = swap_tree_comparison (code);
10360 }
10361
10362 /* Note that it is safe to invert for real values here because we
10363 have already handled the one case (NaN operands) where it matters. */
10364
10365 invert = 0;
10366 if (code == NE_EXPR || code == GE_EXPR)
10367 {
10368 invert = 1;
10369 code = invert_tree_comparison (code, false);
10370 }
10371
10372 /* Compute a result for LT or EQ if the arguments permit;
10373 otherwise return NULL_TREE. */
10374 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10375 {
10376 if (code == EQ_EXPR)
10377 result = tree_int_cst_equal (op0, op1);
10378 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
10379 result = INT_CST_LT_UNSIGNED (op0, op1);
10380 else
10381 result = INT_CST_LT (op0, op1);
10382 }
10383
10384 else if (code == EQ_EXPR && !TREE_SIDE_EFFECTS (op0)
10385 && integer_zerop (op1) && tree_expr_nonzero_p (op0))
10386 result = 0;
10387
10388 /* Two real constants can be compared explicitly. */
10389 else if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
10390 {
10391 if (code == EQ_EXPR)
10392 result = REAL_VALUES_EQUAL (TREE_REAL_CST (op0),
10393 TREE_REAL_CST (op1));
10394 else
10395 result = REAL_VALUES_LESS (TREE_REAL_CST (op0),
10396 TREE_REAL_CST (op1));
10397 }
10398 else
10399 return NULL_TREE;
10400
10401 if (invert)
10402 result ^= 1;
10403 return constant_boolean_node (result, type);
10404 }
10405
10406 /* Build an expression for the address of T. Folds away INDIRECT_REF to
10407 avoid confusing the gimplify process. */
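/* For example (an illustrative sketch): &*p => p, possibly through a
   NOP_EXPR cast when TREE_TYPE (p) differs from PTRTYPE; for other
   operands a plain ADDR_EXPR is built and the base DECL, if any, is
   marked TREE_ADDRESSABLE.  */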
10408
10409 tree
10410 build_fold_addr_expr_with_type (tree t, tree ptrtype)
10411 {
10412 if (TREE_CODE (t) == INDIRECT_REF)
10413 {
10414 t = TREE_OPERAND (t, 0);
10415 if (TREE_TYPE (t) != ptrtype)
10416 t = build1 (NOP_EXPR, ptrtype, t);
10417 }
10418 else
10419 {
10420 tree base = t;
10421 while (TREE_CODE (base) == COMPONENT_REF
10422 || TREE_CODE (base) == ARRAY_REF)
10423 base = TREE_OPERAND (base, 0);
10424 if (DECL_P (base))
10425 TREE_ADDRESSABLE (base) = 1;
10426
10427 t = build1 (ADDR_EXPR, ptrtype, t);
10428 }
10429
10430 return t;
10431 }
10432
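/* Build an expression for the address of T, deriving the pointer type
   from the type of T.  */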
10433 tree
10434 build_fold_addr_expr (tree t)
10435 {
10436 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
10437 }
10438
10439 /* Build an expression for an indirection through T, simplifying some
10440 cases. */
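/* For example (an illustrative sketch of the cases handled below):
   *&x => x when the types are compatible,
   *(foo *)&fooarray => fooarray[0], and
   *(foo *)fooarrptr => (*fooarrptr)[0];
   otherwise a plain INDIRECT_REF is built.  */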
10441
10442 tree
10443 build_fold_indirect_ref (tree t)
10444 {
10445 tree type = TREE_TYPE (TREE_TYPE (t));
10446 tree sub = t;
10447 tree subtype;
10448
10449 STRIP_NOPS (sub);
10450 if (TREE_CODE (sub) == ADDR_EXPR)
10451 {
10452 tree op = TREE_OPERAND (sub, 0);
10453 tree optype = TREE_TYPE (op);
10454 /* *&p => p */
10455 if (lang_hooks.types_compatible_p (type, optype))
10456 return op;
10457 /* *(foo *)&fooarray => fooarray[0] */
10458 else if (TREE_CODE (optype) == ARRAY_TYPE
10459 && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
10460 return build4 (ARRAY_REF, type, op, size_zero_node, NULL_TREE, NULL_TREE);
10461 }
10462
10463 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
10464 subtype = TREE_TYPE (sub);
10465 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
10466 && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
10467 {
10468 sub = build_fold_indirect_ref (sub);
10469 return build4 (ARRAY_REF, type, sub, size_zero_node, NULL_TREE, NULL_TREE);
10470 }
10471
10472 return build1 (INDIRECT_REF, type, t);
10473 }
10474
10475 #include "gt-fold-const.h"